diff --git a/.gitignore b/.gitignore index 861346a494..83706de085 100644 --- a/.gitignore +++ b/.gitignore @@ -53,11 +53,8 @@ parm/gdas/soca parm/gdas/jcb-gdas parm/gdas/jcb-algorithms parm/monitor -parm/post/AEROSOL_LUTS.dat parm/post/nam_micro_lookup.dat parm/post/optics_luts_DUST.dat -parm/post/gtg.config.gfs -parm/post/gtg_imprintings.txt parm/post/optics_luts_DUST_nasa.dat parm/post/optics_luts_NITR_nasa.dat parm/post/optics_luts_SALT.dat @@ -70,47 +67,14 @@ parm/post/optics_luts_WASO.dat parm/post/optics_luts_WASO_nasa.dat parm/post/params_grib2_tbl_new parm/post/post_tag_gfs128 -parm/post/post_tag_gfs65 -parm/post/postcntrl_gefs.xml -parm/post/postcntrl_gefs_aerosol.xml -parm/post/postcntrl_gefs_anl.xml -parm/post/postcntrl_gefs_chem.xml -parm/post/postcntrl_gefs_f00.xml -parm/post/postcntrl_gfs.xml -parm/post/postcntrl_gfs_anl.xml -parm/post/postcntrl_gfs_f00.xml -parm/post/postcntrl_gfs_f00_two.xml -parm/post/postcntrl_gfs_flux.xml -parm/post/postcntrl_gfs_flux_f00.xml -parm/post/postcntrl_gfs_goes.xml -parm/post/postcntrl_gfs_goes.xml-new -parm/post/postcntrl_gfs_two.xml -parm/post/postcntrl_gfs_wafs.xml -parm/post/postcntrl_gfs_wafs_anl.xml -parm/post/postxconfig-NT-GEFS-ANL.txt -parm/post/postxconfig-NT-GEFS-F00.txt -parm/post/postxconfig-NT-GEFS-F00-aerosol.txt -parm/post/postxconfig-NT-GEFS-WAFS.txt -parm/post/postxconfig-NT-GEFS-aerosol.txt -parm/post/postxconfig-NT-GEFS.txt -parm/post/postxconfig-NT-GFS-ANL.txt -parm/post/postxconfig-NT-GFS-F00-TWO.txt -parm/post/postxconfig-NT-GFS-F00.txt -parm/post/postxconfig-NT-GFS-FLUX-F00.txt -parm/post/postxconfig-NT-GFS-FLUX.txt -parm/post/postxconfig-NT-GFS-GOES.txt -parm/post/postxconfig-NT-GFS-TWO.txt -parm/post/postxconfig-NT-GFS-WAFS-ANL.txt -parm/post/postxconfig-NT-GFS-WAFS.txt -parm/post/postxconfig-NT-GFS.txt -parm/post/postxconfig-NT-gefs-aerosol.txt -parm/post/postxconfig-NT-gefs-chem.txt +parm/post/gfs +parm/post/gefs parm/post/ocean.csv parm/post/ice.csv parm/post/ocnicepost.nml.jinja2 
parm/ufs/noahmptable.tbl parm/ufs/model_configure.IN -parm/ufs/model_configure_nest.IN +parm/ufs/input_global_nest.nml.IN parm/ufs/MOM_input_*.IN parm/ufs/MOM6_data_table.IN parm/ufs/ice_in.IN @@ -200,6 +164,7 @@ ush/month_name.sh ush/imsfv3_scf2ioda.py ush/atparse.bash ush/run_bufr2ioda.py +ush/bufr2ioda_insitu* # version files versions/build.ver diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 8ed4927c6b..2654adba29 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -78,7 +78,7 @@ pipeline { Machine = machine[0].toUpperCase() + machine.substring(1) echo "Getting Common Workspace for ${Machine}" ws("${custom_workspace[machine]}/${env.CHANGE_ID}") { - properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) + properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC', 'Gaea'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim() CUSTOM_WORKSPACE = "${WORKSPACE}" sh(script: "mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS;rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS/*") @@ -267,7 +267,7 @@ pipeline { } } } - + stage( '5. 
FINALIZE' ) { agent { label NodeName[machine].toLowerCase() } @@ -297,6 +297,6 @@ pipeline { } } } - } + } } } diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml index c8365e12a0..e1b76f0db8 100644 --- a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml +++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml @@ -9,7 +9,7 @@ arguments: resdetocean: 5.0 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR - icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C48mx500 + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C48mx500/20240610 idate: 2021032412 edate: 2021032418 nens: 0 diff --git a/ci/cases/pr/C96C48_hybatmDA.yaml b/ci/cases/pr/C96C48_hybatmDA.yaml index d08374d4e0..7617e39217 100644 --- a/ci/cases/pr/C96C48_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_hybatmDA.yaml @@ -10,7 +10,7 @@ arguments: resensatmos: 48 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR - icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48/20240610 idate: 2021122018 edate: 2021122106 nens: 2 diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml similarity index 82% rename from ci/cases/pr/C96_atmaerosnowDA.yaml rename to ci/cases/pr/C96C48_hybatmaerosnowDA.yaml index 6eceffa27c..7387e55b24 100644 --- a/ci/cases/pr/C96_atmaerosnowDA.yaml +++ b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml @@ -6,12 +6,13 @@ arguments: pslot: {{ 'pslot' | getenv }} app: ATMA resdetatmos: 96 + resensatmos: 48 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR - icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48/20240610 idate: 2021122012 edate: 2021122100 - nens: 0 + nens: 2 gfs_cyc: 1 start: cold yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml index b5634642f3..0b5aa7b6ac 100644 --- 
a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml @@ -9,7 +9,7 @@ arguments: resensatmos: 48 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR - icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48/20240610 idate: 2024022318 edate: 2024022400 nens: 2 @@ -18,7 +18,6 @@ arguments: yaml: {{ HOMEgfs }}/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml skip_ci_on_hosts: - - hera - gaea - orion - hercules diff --git a/ci/cases/pr/C96_atm3DVar.yaml b/ci/cases/pr/C96_atm3DVar.yaml index 8a89ff25ec..e9e6c2b31c 100644 --- a/ci/cases/pr/C96_atm3DVar.yaml +++ b/ci/cases/pr/C96_atm3DVar.yaml @@ -8,7 +8,7 @@ arguments: resdetatmos: 96 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR - icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48/20240610 idate: 2021122018 edate: 2021122106 nens: 0 diff --git a/ci/cases/pr/C96_atm3DVar_extended.yaml b/ci/cases/pr/C96_atm3DVar_extended.yaml index a1ebab7b44..cdf69f04e0 100644 --- a/ci/cases/pr/C96_atm3DVar_extended.yaml +++ b/ci/cases/pr/C96_atm3DVar_extended.yaml @@ -8,7 +8,7 @@ arguments: resdetatmos: 96 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR - icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48/20240610 idate: 2021122018 edate: 2021122118 nens: 0 diff --git a/ci/scripts/utils/rocotostat.py b/ci/scripts/utils/rocotostat.py index 4afea5c8b5..230a1a6025 100755 --- a/ci/scripts/utils/rocotostat.py +++ b/ci/scripts/utils/rocotostat.py @@ -136,7 +136,7 @@ def rocoto_statcount(rocotostat): rocotostat_output = [line.split()[0:4] for line in rocotostat_output] rocotostat_output = [line for line in rocotostat_output if len(line) != 1] - status_cases = ['SUCCEEDED', 'FAIL', 'DEAD', 'RUNNING', 'SUBMITTING', 'QUEUED', 'UNAVAILABLE'] + status_cases = ['SUCCEEDED', 'FAIL', 'DEAD', 'RUNNING', 
'SUBMITTING', 'QUEUED', 'UNAVAILABLE', 'UNKNOWN'] rocoto_status = {} status_counts = Counter(case for sublist in rocotostat_output for case in sublist) @@ -214,14 +214,14 @@ def is_stalled(rocoto_status): elif rocoto_status['DEAD'] > 0: error_return = rocoto_status['FAIL'] + rocoto_status['DEAD'] rocoto_state = 'FAIL' - elif 'UNAVAILABLE' in rocoto_status or 'UNKNOWN' in rocoto_status: + elif rocoto_status['UNAVAILABLE'] > 0 or rocoto_status['UNKNOWN'] > 0: rocoto_status = attempt_multiple_times(lambda: rocoto_statcount(rocotostat), 2, 120, ProcessError) error_return = 0 rocoto_state = 'RUNNING' - if 'UNAVAILABLE' in rocoto_status: + if rocoto_status['UNAVAILABLE'] > 0: error_return = rocoto_status['UNAVAILABLE'] rocoto_state = 'UNAVAILABLE' - if 'UNKNOWN' in rocoto_status: + if rocoto_status['UNKNOWN'] > 0: error_return += rocoto_status['UNKNOWN'] rocoto_state = 'UNKNOWN' elif is_stalled(rocoto_status): diff --git a/docs/source/init.rst b/docs/source/init.rst index 69e43f9bb0..e1cabdc8e0 100644 --- a/docs/source/init.rst +++ b/docs/source/init.rst @@ -125,14 +125,14 @@ Start date = 2021032312 ├── enkfgdas.20210323 │   ├── 06 │   │   ├── mem001 - │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   │   └── model -> ../../../gdas.20210323/06/model │   │   ├── mem002 - │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   │   └── model -> ../../../gdas.20210323/06/model │   │   ├── mem003 - │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   │   └── model -> ../../../gdas.20210323/06/model ... 
│   │   └── mem080 - │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   └── model -> ../../../gdas.20210323/06/model │   └── 12 │   ├── mem001 │   │   └── analysis @@ -153,7 +153,7 @@ Start date = 2021032312 │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc └── gdas.20210323 ├── 06 - │   └── model_data + │   └── model │   ├── atmos │   │   └── restart │   │   ├── 20210323.120000.ca_data.tile1.nc diff --git a/env/AWSPW.env b/env/AWSPW.env index 992281a1d7..7fe17d2492 100755 --- a/env/AWSPW.env +++ b/env/AWSPW.env @@ -27,7 +27,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + export APRUN="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing AWSPW.env" exit 2 @@ -43,6 +43,13 @@ if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then export APRUN_UFS="${launcher} -n ${ufs_ntasks}" unset nnodes ufs_ntasks +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + + export CFP_MP="YES" + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi + export wavempexec=${launcher} + export wave_mpmd=${mpmd_opt} + elif [[ "${step}" = "post" ]]; then export NTHREADS_NP=${NTHREADS1} @@ -52,6 +59,15 @@ elif [[ "${step}" = "post" ]]; then [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} export APRUN_DWN="${launcher} -n ${ntasks_dwn}" +elif [[ "${step}" = "atmos_products" ]]; then + + export USE_CFP="YES" # Use MPMD for downstream product generation on Hera + +elif [[ "${step}" = 
"oceanice_products" ]]; then + + export NTHREADS_OCNICEPOST=${NTHREADS1} + export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}" + elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} diff --git a/env/AZUREPW.env b/env/AZUREPW.env new file mode 100755 index 0000000000..706c659e95 --- /dev/null +++ b/env/AZUREPW.env @@ -0,0 +1,55 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" + exit 1 + +fi + +step=$1 + +export launcher="srun -l --export=ALL" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" + +# Configure MPI environment +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + +ulimit -s unlimited +ulimit -a + +# Calculate common variables +# Check first if the dependent variables are set +if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then + max_threads_per_task=$((max_tasks_per_node / tasks_per_node)) + NTHREADSmax=${threads_per_task:-${max_threads_per_task}} + NTHREADS1=${threads_per_task:-1} + [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} + [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} + APRUN="${launcher} -n ${ntasks}" +else + echo "ERROR config.resources must be sourced before sourcing AZUREPW.env" + exit 2 +fi + +if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then + + export launcher="srun --mpi=pmi2 -l" + + (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node )) + (( ufs_ntasks = nnodes*tasks_per_node )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ufs_ntasks}" + unset nnodes ufs_ntasks + +elif [[ "${step}" = "post" ]]; then + + export NTHREADS_NP=${NTHREADS1} + export APRUN_NP="${APRUN}" + + export NTHREADS_DWN=${threads_per_task_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} + 
export APRUN_DWN="${launcher} -n ${ntasks_dwn}" + +fi diff --git a/env/GAEA.env b/env/GAEA.env index 6809a9b186..be5e9f0ca7 100755 --- a/env/GAEA.env +++ b/env/GAEA.env @@ -34,12 +34,42 @@ else exit 2 fi -if [[ "${step}" = "waveinit" ]]; then +if [[ "${step}" = "prep" ]]; then - export CFP_MP="YES" - if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi - export wavempexec=${launcher} - export wave_mpmd=${mpmd_opt} + export POE="NO" + export BACK="NO" + export sys_tp="GAEA" + export launcher_PREP="srun" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then + + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + export NTHREADS_GSI=${NTHREADSmax} + export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + + export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} + export APRUN_CALCINC="${launcher} \$ncmd --cpus-per-task=${NTHREADS_CALCINC}" + + export NTHREADS_CYCLE=${threads_per_task_cycle:-12} + [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} + ntasks_cycle=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${ntasks_cycle} --cpus-per-task=${NTHREADS_CYCLE}" + + export NTHREADS_GAUSFCANL=1 + ntasks_gausfcanl=${ntasks_gausfcanl:-1} + export APRUN_GAUSFCANL="${launcher} -n ${ntasks_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}" + +elif [[ "${step}" = "sfcanl" ]]; then + + export NTHREADS_CYCLE=${threads_per_task:-14} + export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "fcst" ]]; then @@ -49,9 +79,14 @@ elif [[ "${step}" = "fcst" ]]; then export APRUN_UFS="${launcher} -n ${ufs_ntasks}" unset nnodes ufs_ntasks +elif [[ "${step}" = "upp" ]]; then + + export NTHREADS_UPP=${NTHREADS1} + export APRUN_UPP="${APRUN} 
--cpus-per-task=${NTHREADS_UPP}" + elif [[ "${step}" = "atmos_products" ]]; then - export USE_CFP="YES" # Use MPMD for downstream product generation on Hera + export USE_CFP="YES" # Use MPMD for downstream product generation on Gaea elif [[ "${step}" = "oceanice_products" ]]; then diff --git a/env/GOOGLEPW.env b/env/GOOGLEPW.env new file mode 100755 index 0000000000..7d912eaf8b --- /dev/null +++ b/env/GOOGLEPW.env @@ -0,0 +1,107 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" + exit 1 + +fi + +step=$1 + +export launcher="srun -l --export=ALL" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" + +# Configure MPI environment +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + +ulimit -s unlimited +ulimit -a + +# Calculate common variables +# Check first if the dependent variables are set +if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then + max_threads_per_task=$((max_tasks_per_node / tasks_per_node)) + NTHREADSmax=${threads_per_task:-${max_threads_per_task}} + NTHREADS1=${threads_per_task:-1} + [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} + [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} + APRUN="${launcher} -n ${ntasks}" +else + echo "ERROR config.resources must be sourced before sourcing GOOGLEPW.env" + exit 2 +fi + +if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then + + export launcher="srun --mpi=pmi2 -l" + + (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node )) + (( ufs_ntasks = nnodes*tasks_per_node )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ufs_ntasks}" + unset nnodes ufs_ntasks + +elif [[ "${step}" = "prep_emissions" ]]; then + + export APRUN + +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = 
"wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + + export CFP_MP="YES" + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi + export wavempexec=${launcher} + export wave_mpmd=${mpmd_opt} + +elif [[ "${step}" = "post" ]]; then + + export NTHREADS_NP=${NTHREADS1} + export APRUN_NP="${APRUN}" + + export NTHREADS_DWN=${threads_per_task_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} + export APRUN_DWN="${launcher} -n ${ntasks_dwn}" + +elif [[ "${step}" = "atmos_products" ]]; then + + export USE_CFP="YES" # Use MPMD for downstream product generation on Hera + +elif [[ "${step}" = "oceanice_products" ]]; then + + export NTHREADS_OCNICEPOST=${NTHREADS1} + export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}" + +elif [[ "${step}" = "ecen" ]]; then + + export NTHREADS_ECEN=${NTHREADSmax} + export APRUN_ECEN="${APRUN}" + + export NTHREADS_CHGRES=${threads_per_task_chgres:-12} + [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} + export APRUN_CHGRES="time" + + export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} + export APRUN_CALCINC="${APRUN}" + +elif [[ "${step}" = "esfc" ]]; then + + export NTHREADS_ESFC=${NTHREADSmax} + export APRUN_ESFC="${APRUN}" + + export NTHREADS_CYCLE=${threads_per_task_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} + export APRUN_CYCLE="${APRUN}" + +elif [[ "${step}" = "epos" ]]; then + + export NTHREADS_EPOS=${NTHREADSmax} + export APRUN_EPOS="${APRUN}" + +elif [[ "${step}" = "fit2obs" ]]; then + + export NTHREADS_FIT2OBS=${NTHREADS1} + export MPIRUN="${APRUN}" + +fi diff --git a/env/HERA.env b/env/HERA.env index 3f0e7c9f36..272c6773f9 100755 --- a/env/HERA.env +++ b/env/HERA.env 
@@ -72,6 +72,16 @@ elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} export APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}" +elif [[ "${step}" = "atmensanlobs" ]]; then + + export NTHREADS_ATMENSANLOBS=${NTHREADSmax} + export APRUN_ATMENSANLOBS="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" + +elif [[ "${step}" = "atmensanlsol" ]]; then + + export NTHREADS_ATMENSANLSOL=${NTHREADSmax} + export APRUN_ATMENSANLSOL="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" + elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} @@ -80,15 +90,20 @@ elif [[ "${step}" = "atmensanlletkf" ]]; then elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" + export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} @@ -106,6 +121,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" diff --git a/env/HERCULES.env b/env/HERCULES.env index 83fa1aadd1..62b579dda3 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ 
-76,6 +76,16 @@ case ${step} in export NTHREADS_ATMANLFV3INC=${NTHREADSmax} export APRUN_ATMANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" ;; + "atmensanlobs") + + export NTHREADS_ATMENSANLOBS=${NTHREADSmax} + export APRUN_ATMENSANLOBS="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" + ;; + "atmensanlsol") + + export NTHREADS_ATMENSANLSOL=${NTHREADSmax} + export APRUN_ATMENSANLSOL="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" + ;; "atmensanlletkf") export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} @@ -86,12 +96,17 @@ case ${step} in export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" ;; - "aeroanlrun") + "aeroanlvar") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" + ;; + "aeroanlgenb") + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" ;; "prepobsaero") @@ -105,6 +120,14 @@ case ${step} in export APRUN_APPLY_INCR="${launcher} -n 6" ;; + "esnowrecen") + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + ;; + "marinebmat") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" diff --git a/env/JET.env b/env/JET.env index 810a8cd501..52730fc74c 100755 --- a/env/JET.env +++ b/env/JET.env @@ -60,6 +60,16 @@ elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} export APRUN_ATMANLVAR="${APRUN}" +elif [[ "${step}" = "atmensanlobs" ]]; then + + export NTHREADS_ATMENSANLOBS=${NTHREADSmax} + export APRUN_ATMENSANLOBS="${APRUN}" + +elif [[ "${step}" = "atmensanlsol" ]]; then + + export NTHREADS_ATMENSANLSOL=${NTHREADSmax} + export APRUN_ATMENSANLSOL="${APRUN}" + elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} @@ 
-70,13 +80,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${launcher} ${ntasks}" -elif [[ "${step}" = "aeroanlrun" ]]; then - - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" +elif [[ "${step}" = "aeroanlvar" ]]; then export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} @@ -89,6 +104,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/ORION.env b/env/ORION.env index bbbfb59182..638764908f 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -68,6 +68,15 @@ elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} export APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}" +elif [[ "${step}" = "atmensanlobs" ]]; then + + export NTHREADS_ATMENSANLOBS=${NTHREADSmax} + export APRUN_ATMENSANLOBS="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" + +elif [[ "${step}" = "atmensanlsol" ]]; then + + export NTHREADS_ATMENSANLSOL=${NTHREADSmax} + export APRUN_ATMENSANLSOL="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} @@ -78,13 +87,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN} 
--cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} @@ -97,6 +111,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/S4.env b/env/S4.env index 840ca65898..dd852afa0f 100755 --- a/env/S4.env +++ b/env/S4.env @@ -60,6 +60,16 @@ elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} export APRUN_ATMANLVAR="${APRUN}" +elif [[ "${step}" = "atmensanlobs" ]]; then + + export NTHREADS_ATMENSANLOBS=${NTHREADSmax} + export APRUN_ATMENSANLOBS="${APRUN}" + +elif [[ "${step}" = "atmensanlsol" ]]; then + + export NTHREADS_ATMENSANLSOL=${NTHREADSmax} + export APRUN_ATMENSANLSOL="${APRUN}" + elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} @@ -68,15 +78,20 @@ elif [[ "${step}" = "atmensanlletkf" ]]; then elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN}" + export APRUN_ATMENSANLFV3INC="${APRUN}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export 
APRUN_AEROANL="${APRUN}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} @@ -89,6 +104,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 18caf1bc03..2640f85de2 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -53,6 +53,16 @@ elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} export APRUN_ATMANLVAR="${APRUN}" +elif [[ "${step}" = "atmensanlobs" ]]; then + + export NTHREADS_ATMENSANLOBS=${NTHREADSmax} + export APRUN_ATMENSANLOBS="${APRUN}" + +elif [[ "${step}" = "atmensanlsol" ]]; then + + export NTHREADS_ATMENSANLSOL=${NTHREADSmax} + export APRUN_ATMENSANLSOL="${APRUN}" + elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} @@ -63,13 +73,18 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} export APRUN_ATMENSANLFV3INC="${APRUN}" -elif [[ "${step}" = "aeroanlrun" ]]; then +elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} export APRUN_AEROANL="${APRUN}" +elif [[ "${step}" = "aeroanlgenb" ]]; then + + export NTHREADS_AEROANLGENB=${NTHREADSmax} + export APRUN_AEROANLGENB="${APRUN}" + elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} @@ -82,6 +97,40 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ 
"${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + +elif [[ "${step}" = "marinebmat" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + export APRUN_MARINEBMAT="${APRUN}" + +elif [[ "${step}" = "ocnanalrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + export APRUN_OCNANAL="${APRUN}" + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + export APRUN_OCNANAL="${APRUN}" + +elif [[ "${step}" = "ocnanalecen" ]]; then + + export NTHREADS_OCNANALECEN=${NTHREADSmax} + export APRUN_OCNANALECEN="${APRUN} --cpus-per-task=${NTHREADS_OCNANALECEN}" + +elif [[ "${step}" = "marineanalletkf" ]]; then + + export NTHREADS_MARINEANALLETKF=${NTHREADSmax} + export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} @@ -223,7 +272,7 @@ elif [[ "${step}" = "postsnd" ]]; then export OMP_NUM_THREADS=1 export NTHREADS_POSTSND=${NTHREADS1} - export APRUN_POSTSND="${APRUN} --depth=${NTHREADS_POSTSND} --cpu-bind depth" + export mpmd_opt="-ppn 21 ${mpmd_opt}" export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task} diff --git a/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX b/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX new file mode 100755 index 0000000000..81c89e9155 --- /dev/null +++ b/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX @@ -0,0 +1,46 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlgenb" -c "base aeroanl aeroanlgenb" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL \ + COMOUT_CHEM_BMAT:COM_CHEM_BMAT_TMPL \ + COMIN_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL + +mkdir -p "${COMOUT_CHEM_BMAT}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASAEROBMATPY:-${SCRgfs}/exgdas_aero_analysis_generate_bmatrix.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGDAS_ENKF_SNOW_RECENTER b/jobs/JGDAS_ENKF_SNOW_RECENTER new file mode 100755 index 0000000000..05d46cffc2 --- /dev/null +++ b/jobs/JGDAS_ENKF_SNOW_RECENTER @@ -0,0 +1,59 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowrecen" -c "base esnowrecen" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDUMP="gdas" +export GDUMP + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL \ + COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ + COMOUT_CONF:COM_CONF_TMPL +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + +mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" + +for imem in $(seq 1 "${NMEM_ENS}"); do + memchar="mem$(printf %03i "${imem}")" + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + mkdir -p "${COMOUT_SNOW_ANALYSIS}" +done + +############################################################### +# Run relevant script + +EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exgdas_enkf_snow_recenter.py} +${EXSCRIPT} +status=$? 
+(( status != 0 )) && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE index 455f572da5..9c68d1fed6 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -8,25 +8,15 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroan ############################################## # Set variables used in the script ############################################## -# shellcheck disable=SC2153 -GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -gcyc=${GDATE:8:2} -GDUMP="gdas" - ############################################## # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS - -RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL - -mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMOUT_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL \ + COMOUT_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE index b2a2893bc0..921b1458b2 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +++ 
b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -19,13 +19,13 @@ GDUMP="gdas" ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COM_OBS:COM_OBS_TMPL \ + COMOUT_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL - -mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" + COMIN_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL \ + COMIN_CHEM_BMAT_PREV:COM_CHEM_BMAT_TMPL ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_RUN b/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL similarity index 83% rename from jobs/JGLOBAL_AERO_ANALYSIS_RUN rename to jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL index 43749b78c5..290d7225dd 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_RUN +++ b/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL @@ -3,7 +3,7 @@ source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlrun" +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroanlvar" ############################################## # Set variables used in the script @@ -16,7 +16,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlr ############################################################### # Run relevant script -EXSCRIPT=${GDASAERORUNSH:-${SCRgfs}/exglobal_aero_analysis_run.py} +EXSCRIPT=${GDASAEROVARSH:-${SCRgfs}/exglobal_aero_analysis_variational.py} ${EXSCRIPT} status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS b/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS new file mode 100755 index 0000000000..9d858a8a37 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS @@ -0,0 +1,35 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlobs" -c "base atmensanl atmensanlobs" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSOBSSH:-${SCRgfs}/exglobal_atmens_analysis_obs.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL b/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL new file mode 100755 index 0000000000..415791cdd0 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL @@ -0,0 +1,35 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlsol" -c "base atmensanl atmensanlsol" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSSOLSH:-${SCRgfs}/exglobal_atmens_analysis_sol.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_PREP_SNOW_OBS b/jobs/JGLOBAL_PREP_SNOW_OBS index f5ea3fc122..0e3557697d 100755 --- a/jobs/JGLOBAL_PREP_SNOW_OBS +++ b/jobs/JGLOBAL_PREP_SNOW_OBS @@ -41,4 +41,10 @@ if [[ -e "${pgmout}" ]] ; then cat "${pgmout}" fi +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + exit 0 diff --git a/jobs/JGLOBAL_SNOW_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS index b7d8c37060..e0f24fa624 100755 --- a/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ANALYSIS @@ -44,4 +44,10 @@ if [[ -e "${pgmout}" ]] ; then cat "${pgmout}" fi +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + exit 0 diff --git a/jobs/JGLOBAL_STAGE_IC b/jobs/JGLOBAL_STAGE_IC index 
52225ac9d3..b8126f8efe 100755 --- a/jobs/JGLOBAL_STAGE_IC +++ b/jobs/JGLOBAL_STAGE_IC @@ -3,15 +3,16 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" -# Restart conditions for GFS cycle come from GDAS -# shellcheck disable=SC2153 -rCDUMP=${RUN} -# shellcheck disable=SC2153 -[[ ${RUN} = "gfs" ]] && export rCDUMP="gdas" -export rCDUMP +# Execute staging +"${SCRgfs}/exglobal_stage_ic.py" +err=$? -# Execute the Script -"${SCRgfs}/exglobal_stage_ic.sh" +############################################################### +# Check for errors and exit if any of the above failed +if [[ "${err}" -ne 0 ]]; then + echo "FATAL ERROR: Unable to copy ICs to ${ROTDIR}; ABORT!" + exit "${err}" +fi ########################################## # Remove the Temporary working directory diff --git a/jobs/rocoto/aeroanlgenb.sh b/jobs/rocoto/aeroanlgenb.sh new file mode 100755 index 0000000000..d0bc5dda9b --- /dev/null +++ b/jobs/rocoto/aeroanlgenb.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="aeroanlgenb" +export jobid="${job}.$$" + +############################################################### + +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX" +status=$? +exit "${status}" diff --git a/jobs/rocoto/aeroanlvar.sh b/jobs/rocoto/aeroanlvar.sh new file mode 100755 index 0000000000..7aa7d831f9 --- /dev/null +++ b/jobs/rocoto/aeroanlvar.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="aeroanlvar" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL" +status=$? +exit "${status}" diff --git a/jobs/rocoto/arch_test.sh b/jobs/rocoto/arch_test.sh deleted file mode 100755 index c723c842aa..0000000000 --- a/jobs/rocoto/arch_test.sh +++ /dev/null @@ -1,3 +0,0 @@ -#! /usr/bin/env bash -############################################################### -exit 0 diff --git a/jobs/rocoto/atmensanlobs.sh b/jobs/rocoto/atmensanlobs.sh new file mode 100755 index 0000000000..d02d013bcd --- /dev/null +++ b/jobs/rocoto/atmensanlobs.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlobs" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlsol.sh b/jobs/rocoto/atmensanlsol.sh new file mode 100755 index 0000000000..e1fe59d986 --- /dev/null +++ b/jobs/rocoto/atmensanlsol.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlsol" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL" +status=$? 
+exit "${status}" diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/esnowrecen.sh similarity index 84% rename from jobs/rocoto/aeroanlrun.sh rename to jobs/rocoto/esnowrecen.sh index 529bb2d7d1..f8c4f8f7fc 100755 --- a/jobs/rocoto/aeroanlrun.sh +++ b/jobs/rocoto/esnowrecen.sh @@ -8,11 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="aeroanlrun" +export job="esnowrecen" export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN" +"${HOMEgfs}/jobs/JGDAS_ENKF_SNOW_RECENTER" status=$? exit "${status}" diff --git a/modulefiles/module_base.gaea.lua b/modulefiles/module_base.gaea.lua index 55ad6b0c34..b08e79c274 100644 --- a/modulefiles/module_base.gaea.lua +++ b/modulefiles/module_base.gaea.lua @@ -15,6 +15,7 @@ load(pathJoin("cdo", (os.getenv("cdo_ver") or "None"))) load(pathJoin("hdf5", (os.getenv("hdf5_ver") or "None"))) load(pathJoin("netcdf-c", (os.getenv("netcdf_c_ver") or "None"))) load(pathJoin("netcdf-fortran", (os.getenv("netcdf_fortran_ver") or "None"))) +load(pathJoin("perlbrew", (os.getenv("perl_ver") or "None"))) load(pathJoin("nco", (os.getenv("nco_ver") or "None"))) load(pathJoin("prod_util", (os.getenv("prod_util_ver") or "None"))) @@ -25,6 +26,7 @@ load(pathJoin("crtm", (os.getenv("crtm_ver") or "None"))) load(pathJoin("bufr", (os.getenv("bufr_ver") or "None"))) load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None"))) load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None"))) +load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None"))) load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None"))) load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None"))) load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None"))) @@ -36,4 +38,13 @@ load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) setenv("WGRIB2","wgrib2") 
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) +--prepend_path("MODULEPATH", pathJoin("/gpfs/f5/ufs-ard/world-shared/global/glopara/data/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles")) +--load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None"))) +prepend_path("MODULEPATH", pathJoin("/gpfs/f5/ufs-ard/world-shared/global/glopara/data/git/prepobs/v1.1.0", "modulefiles")) +load(pathJoin("prepobs", "1.1.0")) + +prepend_path("MODULEPATH", pathJoin("/gpfs/f5/ufs-ard/world-shared/global/glopara/data/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles")) +load(pathJoin("fit2obs", (os.getenv("fit2obs_ver") or "None"))) + + whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.gaea.lua b/modulefiles/module_gwsetup.gaea.lua index 8b9f70e4a0..0bcc689bad 100644 --- a/modulefiles/module_gwsetup.gaea.lua +++ b/modulefiles/module_gwsetup.gaea.lua @@ -15,5 +15,6 @@ load(pathJoin("python", python_ver)) load("py-jinja2") load("py-pyyaml") load("py-numpy") +load("git-lfs") whatis("Description: GFS run setup environment") diff --git a/parm/archive/enkf.yaml.j2 b/parm/archive/enkf.yaml.j2 index 92ed0095af..a95046d4d6 100644 --- a/parm/archive/enkf.yaml.j2 +++ b/parm/archive/enkf.yaml.j2 @@ -15,17 +15,21 @@ enkf: - "logs/{{ cycle_YMDH }}/{{ RUN }}ecen{{ '%03d' % grp }}.log" {% endfor %} - {% if DO_JEDIATMENS %} - {% set steps = ["atmensanlinit", "atmensanlletkf", "atmensanlfv3inc", "atmensanlfinal"] %} - {% else %} - {% set steps = ["eobs", "eupd"] %} {% if lobsdiag_forenkf %} - {% do steps.append("ediag") %} + {% if DO_JEDIATMENS %} + {% set steps = ["atmensanlinit", "atmensanlobs", "atmensanlsol", "atmensanlfv3inc", "atmensanlfinal"] %} + {% else %} + {% set steps = ["eobs", "ediag", "eupd"] %} + {% endif %} {% else %} - {% for mem in range(1, nmem_ens + 1) %} - {% do steps.append("eomg_mem{{ '%03d' % mem }}") %} - {% endfor %} - {% endif %} + {% if DO_JEDIATMENS %} + {% set steps = 
["atmensanlinit", "atmensanlletkf", "atmensanlfv3inc", "atmensanlfinal"] %} + {% else %} + {% set steps = ["eobs", "eupd"] %} + {% for mem in range(1, nmem_ens + 1) %} + {% do steps.append("eomg_mem{{ '%03d' % mem }}") %} + {% endfor %} + {% endif %} {% endif %} {% for step in steps %} @@ -49,9 +53,18 @@ enkf: "oznstat.ensmean", "radstat.ensmean"] %} {% else %} - {% set da_files = ["atmens.yaml", + {% if lobsdiag_forenkf %} + {% set da_files = ["atmensanlobs.yaml", + "atmensanlsol.yaml", + "atmensanlfv3inc.yaml", "atminc.ensmean.nc", "atmensstat"] %} + {% else %} + {% set da_files = ["atmensanlletkf.yaml", + "atmensanlfv3inc.yaml", + "atminc.ensmean.nc", + "atmensstat"] %} + {% endif %} {% endif %} {% for file in da_files %} - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}{{ file }}" diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 index db92141ede..56e47e595a 100644 --- a/parm/archive/gdas.yaml.j2 +++ b/parm/archive/gdas.yaml.j2 @@ -58,7 +58,8 @@ gdas: # Analysis state {% if DO_JEDIATMVAR %} - - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanlvar.yaml" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanlfv3inc.yaml" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" {% else %} - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" diff --git a/parm/archive/gefs_arcdir.yaml.j2 b/parm/archive/gefs_arcdir.yaml.j2 new file mode 100644 index 0000000000..a59a0e1a8f --- /dev/null +++ b/parm/archive/gefs_arcdir.yaml.j2 @@ -0,0 +1,38 @@ +{% set cycle_HH = current_cycle | strftime("%H") %} +{% set cycle_YMDH = current_cycle | to_YMDH %} +{% set cycle_YMD = current_cycle | to_YMD %} +{% set head = RUN + ".t" + cycle_HH + "z." 
%} + +# Declare the GEFS_ARCH where atmos data will be sent +{% set GEFS_ARCH = ROTDIR ~ "/gefsarch" %} + +{% set file_set = [] %} + +{% set tmpl_dict = ({ '${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':cycle_YMD, + '${HH}':cycle_HH, + '${GRID}': '1p00', + '${MEMDIR}': 'ensstat' }) %} + +{% set COMIN_ATMOS_ENSSTAT_1p00 = COM_ATMOS_GRIB_GRID_TMPL | replace_tmpl(tmpl_dict) %} + +# Select ensstat files to copy to the arcdir +{% if RUN == "gefs" %} + {% set ensstat_files = [] %} + {% if path_exists(COMIN_ATMOS_ENSSTAT_1p00) %} + {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} + {% do ensstat_files.append([COMIN_ATMOS_ENSSTAT_1p00 ~ "/" ~ head ~ "mean.pres_." ~ + "1p00" ~ ".f" ~ '%03d'|format(fhr) ~ ".grib2", + GEFS_ARCH]) %} + {% endfor %} + {% endif %} +{% endif %} +{% set file_set = ensstat_files %} +# Actually write the yaml +mkdir: + - "{{ GEFS_ARCH }}" +copy: + {% for source_dest_pair in file_set %} + - {{ source_dest_pair }} + {% endfor %} diff --git a/parm/archive/arcdir.yaml.j2 b/parm/archive/gfs_arcdir.yaml.j2 similarity index 100% rename from parm/archive/arcdir.yaml.j2 rename to parm/archive/gfs_arcdir.yaml.j2 diff --git a/parm/archive/gfsa.yaml.j2 b/parm/archive/gfsa.yaml.j2 index 4a86778e2e..226a7178fa 100644 --- a/parm/archive/gfsa.yaml.j2 +++ b/parm/archive/gfsa.yaml.j2 @@ -32,7 +32,8 @@ gfsa: # State data {% if DO_JEDIATMVAR %} - - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanlvar.yaml" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanlfv3inc.yaml" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" {% else %} - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index fad9e3421a..47474fb108 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -38,7 +38,6 @@ export FIXugwd=${FIXgfs}/ugwd export 
PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops export COMINsyn="@COMINsyn@" -export BASE_CPLIC="@BASE_CPLIC@" # USER specific paths export HOMEDIR="@HOMEDIR@" @@ -269,10 +268,6 @@ export OUTPUT_GRID="gaussian_grid" export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST export WRITE_NSFLIP=".true." -# Override normal post flat files for GEFS -export FLTFILEGFS="${PARMgfs}/post/postxconfig-NT-GEFS.txt" -export FLTFILEGFSF00="${PARMgfs}/post/postxconfig-NT-GEFS-F00.txt" - # Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL export imp_physics=8 @@ -346,4 +341,10 @@ export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arc # Number of regional collectives to create soundings for export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} +# The tracker, genesis, and METplus jobs are not supported on CSPs yet +# TODO: we should place these in workflow/hosts/[csp]pw.yaml as part of AWS/AZURE/GOOGLE setup, not for general. +if [[ "${machine}" =~ "PW" ]]; then + export DO_WAVE="NO" +fi + echo "END: config.base" diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index 407e48496e..efdedb24f4 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -193,6 +193,7 @@ case ${imp_physics} in export dt_inner=$((DELTIM/2)) export sedi_semi=.true. if [[ "${sedi_semi}" == .true. 
]]; then export dt_inner=${DELTIM} ; fi + if [[ dt_inner -gt 300 ]]; then export dt_inner=300 ; fi export decfl=10 export hord_mt_nh_nonmono=5 diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 297bc08c05..79f3426f56 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -41,7 +41,11 @@ case ${machine} in ;; "AWSPW") export PARTITION_BATCH="compute" - max_tasks_per_node=40 + max_tasks_per_node=36 + ;; + "GOOGLEPW") + export PARTITION_BATCH="compute" + max_tasks_per_node=32 ;; *) echo "FATAL ERROR: Unknown machine encountered by ${BASH_SOURCE[0]}" diff --git a/parm/config/gefs/config.resources.AWSPW b/parm/config/gefs/config.resources.AWSPW new file mode 100644 index 0000000000..a735c7622d --- /dev/null +++ b/parm/config/gefs/config.resources.AWSPW @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +# AWS-specific job resources + +export is_exclusive="True" +unset memory + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/parm/config/gefs/config.resources.GOOGLEPW b/parm/config/gefs/config.resources.GOOGLEPW new file mode 100644 index 0000000000..21e54013c7 --- /dev/null +++ b/parm/config/gefs/config.resources.GOOGLEPW @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +# GOOGLE-specific job resources + +export is_exclusive="True" +unset memory + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/parm/config/gefs/config.stage_ic b/parm/config/gefs/config.stage_ic index f0b5dfa609..cac65c74b9 100644 --- a/parm/config/gefs/config.stage_ic +++ b/parm/config/gefs/config.stage_ic @@ -7,32 +7,27 @@ echo "BEGIN: config.stage_ic" # Get task specific resources source "${EXPDIR}/config.resources" stage_ic -case "${CASE}" in - "C384") - export CPL_ATMIC="" - export CPL_ICEIC="" - export CPL_OCNIC="" - export CPL_WAVIC="" - export CPL_MEDIC="" - ;; - "C96") - export CPL_ATMIC="" - export CPL_ICEIC="" - export CPL_OCNIC="" - export CPL_WAVIC="" - export CPL_MEDIC="" - ;; - "C48") - export CPL_ATMIC="gefs_test" - export CPL_ICEIC="gefs_test" - export CPL_OCNIC="gefs_test" - export CPL_WAVIC="gefs_test" - export CPL_MEDIC="gefs_test" - ;; - *) - echo "FATAL ERROR Unrecognized resolution: ${CASE}" - exit 1 - ;; -esac +export ICSDIR="@ICSDIR@" # User provided ICSDIR; blank if not provided +export BASE_IC="@BASE_IC@" # Platform home for staged ICs + +export STAGE_IC_YAML_TMPL="${PARMgfs}/stage/master_gefs.yaml.j2" + +# Set ICSDIR + +if [[ -z "${ICSDIR}" ]] ; then + + ic_ver="20240610" + + if (( NMEM_ENS > 0 )) ; then + ensic="${CASE_ENS}" + fi + + if [[ "${DO_OCN:-NO}" == "YES" ]] ; then + ocnic="mx${OCNRES}" + fi + + export ICSDIR="${BASE_IC}/${CASE}${ensic:-}${ocnic:-}/${ic_ver}" + +fi echo "END: config.stage_ic" diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index 584e4769a8..bfc11e3c5a 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -254,6 +254,25 @@ export ntasks_fv3_gfs export ntasks_quilt export ntasks_quilt_gfs +# Determine whether to use compression in the write grid component based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + zstandard_level=0 + ideflate=0 + 
quantize_nsd=0 + ;; + "C768" | "C1152" | "C3072") + zstandard_level=0 + ideflate=1 + quantize_nsd=5 + ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; +esac +export zstandard_level ideflate quantize_nsd + # Determine whether to use parallel NetCDF based on resolution case ${fv3_res} in "C48" | "C96" | "C192" | "C384") @@ -353,7 +372,7 @@ if [[ "${skip_mom6}" == "false" ]]; then if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" - else + else MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" MOM6_DIAG_MISVAL="0.0" fi diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml index e4666d1aba..5ecf690e18 100644 --- a/parm/config/gefs/yaml/defaults.yaml +++ b/parm/config/gefs/yaml/defaults.yaml @@ -14,3 +14,5 @@ base: FCST_BREAKPOINTS: "48" REPLAY_ICS: "NO" USE_OCN_PERTURB_FILES: "false" + HPSSARCH: "NO" + LOCALARCH: "NO" diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index a1b7e1d44b..5ac03bd7ee 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -5,20 +5,36 @@ echo "BEGIN: config.aeroanl" -export CASE_ANL=${CASE} +# define analysis resolution based on deterministic res +case ${CASE} in + "C1152" | "C768" | "C384" | "C192") + CASE_ANL="C192" + ;; + "C96" | "C48") + CASE_ANL=${CASE} + ;; + *) + echo "FATAL ERROR: Aerosol DA not supported at ${CASE} resolution" + exit 4 +esac +export CASE_ANL export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" -export STATICB_TYPE='identity' +export STATICB_TYPE='diffusion' export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" -export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" -export BERROR_DATE="20160630.000000" +export BERROR_DATA_DIR="${FIXgfs}/gdas/aero/clim_b" export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" export 
JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" +export AERO_STAGE_VARIATIONAL_TMPL="${PARMgfs}/gdas/aero_stage_variational.yaml.j2" +export AERO_FINALIZE_VARIATIONAL_TMPL="${PARMgfs}/gdas/aero_finalize_variational.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE="${EXECgfs}/gdas.x" +export BMATEXE="${EXECgfs}/gdasapp_chem_diagb.x" +export DIFFUSIONEXE="${EXECgfs}/gdas_fv3jedi_error_covariance_toolbox.x" if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" diff --git a/parm/config/gfs/config.aeroanlgenb b/parm/config/gfs/config.aeroanlgenb new file mode 100644 index 0000000000..b41b22a524 --- /dev/null +++ b/parm/config/gfs/config.aeroanlgenb @@ -0,0 +1,29 @@ +#!/bin/bash -x + +########## config.aeroanlgenb ########## +# Aerosol Variance specific + +echo "BEGIN: config.aeroanlgenb" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlgenb + +export BMATYAML="${PARMgfs}/gdas/aero/berror/aero_diagb.yaml.j2" +export DIFFUSIONYAML="${PARMgfs}/gdas/aero/berror/aero_diffusionparm.yaml.j2" +export INTERPYAML="${PARMgfs}/gdas/aero/berror/aero_interp.yaml.j2" +export AERO_BMATRIX_STAGE_TMPL="${PARMgfs}/gdas/aero_stage_bmatrix_bkg.yaml.j2" +export AERO_BMATRIX_FINALIZE_TMPL="${PARMgfs}/gdas/aero_finalize_bmatrix_bkg.yaml.j2" +export aero_diffusion_iter=10 +export aero_diffusion_horiz_len=2500e3 +export aero_diffusion_fixed_val=1.0 +export npx_clim_b=97 +export npy_clim_b=97 +export aero_diagb_weight=0.9 +export aero_staticb_rescaling_factor=2.0 +export aero_diagb_rescale=20.0 +export aero_diagb_n_halo=4 +export aero_diagb_n_neighbors=16 +export aero_diagb_smooth_horiz_iter=0 +export aero_diagb_smooth_vert_iter=0 + +echo "END: config.aeroanlgenb" diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun deleted file mode 100644 index 012e5b79f3..0000000000 --- a/parm/config/gfs/config.aeroanlrun +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -x - -########## 
config.aeroanlrun ########## -# Aerosol Analysis specific - -echo "BEGIN: config.aeroanlrun" - -# Get task specific resources -source "${EXPDIR}/config.resources" aeroanlrun - -echo "END: config.aeroanlrun" diff --git a/parm/config/gfs/config.aeroanlvar b/parm/config/gfs/config.aeroanlvar new file mode 100644 index 0000000000..4282b6c840 --- /dev/null +++ b/parm/config/gfs/config.aeroanlvar @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlvar ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlvar" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlvar + +echo "END: config.aeroanlvar" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index ddd3d88659..f5a1278248 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -6,7 +6,11 @@ echo "BEGIN: config.atmensanl" export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" -export JCB_ALGO_YAML=@JCB_ALGO_YAML@ +if [[ ${lobsdiag_forenkf} = ".false." ]] ; then + export JCB_ALGO_YAML=@JCB_ALGO_YAML_LETKF@ +else + export JCB_ALGO_YAML=@JCB_ALGO_YAML_OBS@ +fi export INTERP_METHOD='barycentric' diff --git a/parm/config/gfs/config.atmensanlobs b/parm/config/gfs/config.atmensanlobs new file mode 100644 index 0000000000..dff3fa3095 --- /dev/null +++ b/parm/config/gfs/config.atmensanlobs @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.atmensanlobs ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlobs + +export JCB_ALGO_YAML=@JCB_ALGO_YAML@ + +echo "END: config.atmensanlobs" diff --git a/parm/config/gfs/config.atmensanlsol b/parm/config/gfs/config.atmensanlsol new file mode 100644 index 0000000000..dac161373b --- /dev/null +++ b/parm/config/gfs/config.atmensanlsol @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.atmensanlsol ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlsol" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlsol + +export JCB_ALGO_YAML=@JCB_ALGO_YAML@ + +echo "END: config.atmensanlsol" diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index e6a626cfe3..81b18030fa 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -47,7 +47,6 @@ export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops export COMINsyn="@COMINsyn@" export DMPDIR="@DMPDIR@" -export BASE_CPLIC="@BASE_CPLIC@" # Gempak from external models # Default locations are to dummy locations for testing @@ -471,6 +470,7 @@ export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs foreca # The monitor jobs are not yet supported for JEDIATMVAR. if [[ ${DO_JEDIATMVAR} = "YES" ]]; then + export DO_FIT2OBS="NO" # Run fit to observations package export DO_VERFOZN="NO" # Ozone data assimilation monitoring export DO_VERFRAD="NO" # Radiance data assimilation monitoring export DO_VMINMON="NO" # GSI minimization monitoring @@ -483,12 +483,13 @@ export OFFSET_START_HOUR=0 # Number of regional collectives to create soundings for export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} -# The tracker, genesis, and METplus jobs are not supported on AWS yet -# TODO: we should place these in workflow/hosts/awspw.yaml as part of AWS setup, not for general. -if [[ "${machine}" == "AWSPW" ]]; then +# The tracker, genesis, and METplus jobs are not supported on CSPs yet +# TODO: we should place these in workflow/hosts/[csp]pw.yaml as part of AWS/AZURE/GOOGLE setup, not for general.
+if [[ "${machine}" =~ "PW" ]]; then export DO_TRACKER="NO" export DO_GENESIS="NO" export DO_METP="NO" + export DO_WAVE="NO" fi echo "END: config.base" diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 222ffdae95..61d592561d 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -12,7 +12,7 @@ echo "BEGIN: config.com" # declare_from_tmpl [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] # # options: -# -r: Make variable read-only (same as `decalre -r`) +# -r: Make variable read-only (same as `declare -r`) # -x: Mark variable for declare -rx (same as `declare -x`) # var1, var2, etc: Variable names whose values will be generated from a template # and declared @@ -51,12 +51,12 @@ declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' declare -rx COM_CONF_TMPL=${COM_BASE}'/conf' declare -rx COM_OBS_JEDI=${COM_BASE}'/obs_jedi' -declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' -declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model/atmos/restart' declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' declare -rx COM_SNOW_ANALYSIS_TMPL=${COM_BASE}'/analysis/snow' -declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' -declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model/atmos/master' declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2' declare -rx COM_ATMOS_GRIB_GRID_TMPL=${COM_ATMOS_GRIB_TMPL}'/${GRID}' declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' @@ -70,17 +70,17 @@ declare -rx COM_ATMOS_RADMON_TMPL=${COM_BASE}'/products/atmos/radmon' declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' 
-declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' -declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' -declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model/wave/history' declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' -declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' -declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' -declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model/ocean/input' declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' declare -rx COM_OCEAN_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ocean' declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf' @@ -89,14 +89,15 @@ declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' declare -rx COM_ICE_ANALYSIS_TMPL=${COM_BASE}'/analysis/ice' declare -rx COM_ICE_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ice' -declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' -declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' -declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model/ice/restart' declare -rx COM_ICE_NETCDF_TMPL=${COM_BASE}'/products/ice/netcdf' declare -rx 
COM_ICE_GRIB_TMPL=${COM_BASE}'/products/ice/grib2' declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}' -declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model/chem/history' declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' +declare -rx COM_CHEM_BMAT_TMPL=${COM_CHEM_ANALYSIS_TMPL}'/bmatrix' -declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model/med/restart' diff --git a/parm/config/gfs/config.esnowrecen b/parm/config/gfs/config.esnowrecen new file mode 100644 index 0000000000..adb039559a --- /dev/null +++ b/parm/config/gfs/config.esnowrecen @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.esnowrecen ########## +# configuration common to snow ensemble analysis tasks + +echo "BEGIN: config.esnowrecen" + +# Get task specific resources +source "${EXPDIR}/config.resources" esnowrecen + +export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" + +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" +export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2" +export SNOW_OROG_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_orog.yaml.j2" +export SNOW_ENS_FINALIZE_TMPL="${PARMgfs}/gdas/snow_finalize_ens_update.yaml.j2" + +# Name of the executable that applies increment to bkg and its namelist template +export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" +export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/ens_apply_incr_nml.j2" + +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ + +export JEDIEXE=${EXECgfs}/gdasapp_land_ensrecenter.x +export FREGRID=${EXECgfs}/fregrid.x + +echo "END: config.esnowrecen" diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 2743ea0745..da336ff73b 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ 
-209,6 +209,7 @@ case ${imp_physics} in export dt_inner=$((DELTIM/2)) export sedi_semi=.true. if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + if [[ dt_inner -gt 300 ]]; then export dt_inner=300; fi export decfl=10 export hord_mt_nh_nonmono=5 diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index cec2aef238..851acb2e0d 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -14,9 +14,9 @@ if (( $# != 1 )); then echo "stage_ic aerosol_init" echo "prep prepsnowobs prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" - echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" - echo "snowanl" - echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal" + echo "atmensanlinit atmensanlobs atmensanlsol atmensanlletkf atmensanlfv3inc atmensanlfinal" + echo "snowanl esnowrecen" + echo "prepobsaero aeroanlinit aeroanlvar aeroanlfinal aeroanlgenb" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" @@ -112,6 +112,22 @@ case ${machine} in # shellcheck disable=SC2034 mem_node_max="" ;; + "AZUREPW") + export PARTITION_BATCH="compute" + npe_node_max=24 + max_tasks_per_node=24 + # TODO Supply a max mem/node value for AZURE + # shellcheck disable=SC2034 + mem_node_max="" + ;; + "GOOGLEPW") + export PARTITION_BATCH="compute" + npe_node_max=30 + max_tasks_per_node=30 + # TODO Supply a max mem/node value for GOOGLE + # shellcheck disable=SC2034 + mem_node_max="" + ;; "CONTAINER") max_tasks_per_node=1 # TODO Supply a max mem/node value for a container @@ -270,7 +286,7 @@ case ${step} in ntasks=1 threads_per_task=1 tasks_per_node=$(( max_tasks_per_node / threads_per_task )) - memory="3072M" + memory="4GB" ;; "atmanlvar") @@ -340,6 +356,35 @@ case ${step} in tasks_per_node=$(( max_tasks_per_node / threads_per_task )) ;; + "esnowrecen") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") + layout_x=6 + 
layout_y=6 + ;; + "C384") + layout_x=5 + layout_y=5 + ;; + "C192" | "C96" | "C48") + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + walltime="00:15:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + "prepobsaero") walltime="00:30:00" ntasks=1 @@ -356,12 +401,12 @@ case ${step} in layout_y=8 ;; "C384") - layout_x=8 - layout_y=8 + layout_x=6 + layout_y=6 ;; "C192" | "C96") - layout_x=8 - layout_y=8 + layout_x=4 + layout_y=4 ;; "C48" ) # this case is for testing only @@ -382,27 +427,61 @@ case ${step} in memory="3072M" ;; - "aeroanlrun") + "aeroanlvar") case ${CASE} in "C768") layout_x=8 layout_y=8 ;; "C384") - layout_x=8 - layout_y=8 + layout_x=6 + layout_y=6 ;; "C192" | "C96") + layout_x=4 + layout_y=4 + ;; + "C48" ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + walltime="00:30:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export is_exclusive=True + ;; + + "aeroanlgenb") + case ${CASE} in + "C768") layout_x=8 layout_y=8 ;; + "C384") + layout_x=6 + layout_y=6 + ;; + "C192" | "C96") + layout_x=4 + layout_y=4 + ;; "C48" ) # this case is for testing only layout_x=1 layout_y=1 ;; *) - echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" exit 4 esac @@ -414,8 +493,10 @@ case ${step} in threads_per_task=1 tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True + ;; + "aeroanlfinal") walltime="00:10:00" ntasks=1 @@ -959,6 +1040,30 @@ case ${step} in memory="3072M" ;; + "atmensanlobs") + 
export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + walltime="00:30:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="96GB" + export is_exclusive=True + ;; + + "atmensanlsol") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + walltime="00:30:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="96GB" + export is_exclusive=True + ;; + "atmensanlletkf") export layout_x=${layout_x_atmensanl} export layout_y=${layout_y_atmensanl} @@ -1086,9 +1191,9 @@ case ${step} in "postsnd") walltime="02:00:00" - ntasks=40 - threads_per_task=8 - tasks_per_node=10 + export ntasks=141 + threads_per_task=6 + export tasks_per_node=21 export ntasks_postsndcfp=9 export tasks_per_node_postsndcfp=1 postsnd_req_cores=$(( tasks_per_node * threads_per_task )) diff --git a/parm/config/gfs/config.resources.AWSPW b/parm/config/gfs/config.resources.AWSPW index 8649713bb7..a735c7622d 100644 --- a/parm/config/gfs/config.resources.AWSPW +++ b/parm/config/gfs/config.resources.AWSPW @@ -3,6 +3,7 @@ # AWS-specific job resources export is_exclusive="True" +unset memory # shellcheck disable=SC2312 for mem_var in $(env | grep '^memory_' | cut -d= -f1); do diff --git a/parm/config/gfs/config.resources.AZUREPW b/parm/config/gfs/config.resources.AZUREPW new file mode 100644 index 0000000000..96303139d8 --- /dev/null +++ b/parm/config/gfs/config.resources.AZUREPW @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +# AZURE-specific job resources + +export is_exclusive="True" +unset memory + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/parm/config/gfs/config.resources.GOOGLEPW b/parm/config/gfs/config.resources.GOOGLEPW new file mode 100644 index 0000000000..21e54013c7 --- /dev/null +++ b/parm/config/gfs/config.resources.GOOGLEPW @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +# GOOGLE-specific job resources + +export is_exclusive="True" +unset memory + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/parm/config/gfs/config.resources.HERA b/parm/config/gfs/config.resources.HERA index 36f50508c3..e79d4c5b0a 100644 --- a/parm/config/gfs/config.resources.HERA +++ b/parm/config/gfs/config.resources.HERA @@ -11,6 +11,19 @@ case ${step} in fi ;; + "atmanlvar") + export tasks_per_node_gdas=12 + export tasks_per_node_gfs=12 + ;; + + "atmensanlobs") + export tasks_per_node=12 + ;; + + "atmensanlsol") + export tasks_per_node=12 + ;; + "eupd") case ${CASE} in "C384") diff --git a/parm/config/gfs/config.resources.HERCULES b/parm/config/gfs/config.resources.HERCULES index 7a5a74f69c..65ea508e01 100644 --- a/parm/config/gfs/config.resources.HERCULES +++ b/parm/config/gfs/config.resources.HERCULES @@ -11,6 +11,17 @@ case ${step} in export tasks_per_node=20 fi ;; + "atmanlvar") + export tasks_per_node_gdas=48 + export tasks_per_node_gfs=48 + export memory="400GB" + ;; + + "atmensanlobs") + export tasks_per_node=48 + export memory="400GB" + ;; + *) ;; esac diff --git a/parm/config/gfs/config.resources.ORION b/parm/config/gfs/config.resources.ORION index e3e81b0182..6b42d780d4 100644 --- a/parm/config/gfs/config.resources.ORION +++ b/parm/config/gfs/config.resources.ORION @@ -9,8 +9,13 @@ case ${step} in # Remove this block once GSI issue is resolved # https://github.com/NOAA-EMC/GSI/pull/764 # 
https://github.com/JCSDA/spack-stack/issues/1166 - export wtime_anal_gdas="02:40:00" - export wtime_anal_gfs="02:00:00" + export walltime_gdas="02:40:00" + export walltime_gfs="02:00:00" + ;; + "eobs") + # TODO: + # Remove this block once the GSI issue is resolved. + export walltime="00:45:00" ;; *) ;; diff --git a/parm/config/gfs/config.resources.WCOSS2 b/parm/config/gfs/config.resources.WCOSS2 index a0a69fa8d1..3ff019068c 100644 --- a/parm/config/gfs/config.resources.WCOSS2 +++ b/parm/config/gfs/config.resources.WCOSS2 @@ -18,6 +18,17 @@ case ${step} in fi ;; + "atmanlvar") + export tasks_per_node_gdas=48 + export tasks_per_node_gfs=48 + export memory="400GB" + ;; + + "atmensanlobs") + export tasks_per_node=48 + export memory="400GB" + ;; + "fit2obs") export tasks_per_node=3 ;; diff --git a/parm/config/gfs/config.stage_ic b/parm/config/gfs/config.stage_ic index 9956e8af6a..7aa0c25f32 100644 --- a/parm/config/gfs/config.stage_ic +++ b/parm/config/gfs/config.stage_ic @@ -7,39 +7,26 @@ echo "BEGIN: config.stage_ic" # Get task specific resources source "${EXPDIR}/config.resources" stage_ic -case "${CASE}" in - "C48" | "C96" | "C192") - export CPL_ATMIC="workflow_${CASE}_refactored" - export CPL_ICEIC="workflow_${CASE}_refactored" - export CPL_OCNIC="workflow_${CASE}_refactored" - export CPL_WAVIC="workflow_${CASE}_refactored" - ;; - "C384") - export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c_refactored - export CPL_ICEIC=CPC_refactored - export CPL_OCNIC=CPC3Dvar_refactored - export CPL_WAVIC=workflow_C384_refactored - ;; - "C768") - export CPL_ATMIC=HR3C768 - export CPL_ICEIC=HR3marine - export CPL_OCNIC=HR3marine - export CPL_WAVIC=HR3marine - ;; - "C1152") - export CPL_ATMIC=HR3C1152 - export CPL_ICEIC=HR3marine - export CPL_OCNIC=HR3marine - export CPL_WAVIC=HR3marine - ;; - *) - echo "FATAL ERROR Unrecognized resolution: ${CASE}" - exit 1 - ;; -esac - -if [[ "${DO_NEST:-NO}" == "YES" ]] ; then - export CPL_ATMIC="GLOBAL-NEST_${CASE}" +export ICSDIR="@ICSDIR@" # User 
provided ICSDIR; blank if not provided +export BASE_IC="@BASE_IC@" # Platform home for staged ICs + +export STAGE_IC_YAML_TMPL="${PARMgfs}/stage/master_gfs.yaml.j2" + +# Set ICSDIR (if not defined) +if [[ -z "${ICSDIR}" ]] ; then + + ic_ver="20240610" + + if (( NMEM_ENS > 0 )) ; then + ensic="${CASE_ENS}" + fi + + if [[ "${DO_OCN:-NO}" == "YES" ]] ; then + ocnic="mx${OCNRES}" + fi + + export ICSDIR="${BASE_IC}/${CASE}${ensic:-}${ocnic:-}/${ic_ver}" + fi echo "END: config.stage_ic" diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index 148793927f..b27845aec7 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -356,6 +356,25 @@ export ntasks_fv3_gfs export ntasks_quilt_gdas export ntasks_quilt_gfs +# Determine whether to use compression in the write grid component based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + zstandard_level=0 + ideflate=0 + quantize_nsd=0 + ;; + "C768" | "C1152" | "C3072") + zstandard_level=0 + ideflate=1 + quantize_nsd=5 + ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; +esac +export zstandard_level ideflate quantize_nsd + # Determine whether to use parallel NetCDF based on resolution case ${fv3_res} in "C48" | "C96" | "C192" | "C384") diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index 24729ac43e..05e1b24012 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -31,12 +31,19 @@ atmanl: IO_LAYOUT_Y: 1 atmensanl: - JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2" + JCB_ALGO_YAML_LETKF: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2" + JCB_ALGO_YAML_OBS: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf_observer.yaml.j2" LAYOUT_X_ATMENSANL: 8 LAYOUT_Y_ATMENSANL: 8 IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 +atmensanlobs: + JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf_observer.yaml.j2" + +atmensanlsol: + JCB_ALGO_YAML: 
"${PARMgfs}/gdas/atm/jcb-prototype_lgetkf_solver.yaml.j2" + aeroanl: IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 diff --git a/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 b/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 new file mode 100644 index 0000000000..b33f280945 --- /dev/null +++ b/parm/gdas/aero_finalize_bmatrix_bkg.yaml.j2 @@ -0,0 +1,19 @@ +{% set cycle_HH = current_cycle | strftime("%H") %} +{% set HEAD = RUN + ".t" + cycle_HH + "z." %} +{% set offset_td = "+6H" | to_timedelta %} +{% set background_time = current_cycle | add_to_datetime(offset_td) %} +copy: +### copy YAMLs used +{% set yaml_list = ['chem_diagb.yaml', 'chem_diffusion.yaml'] %} +{% for fname in yaml_list %} +- ["{{ DATA }}/{{ HEAD }}{{ fname }}", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}{{ fname }}"] +{% endfor %} +### copy stddev files to ROTDIR +{% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/stddev/{{ background_time | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_CHEM_BMAT }}/{{ background_time | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} +### copy coupler file +- ["{{ DATA }}/stddev/{{ background_time | to_fv3time }}.stddev.coupler.res", "{{ COMOUT_CHEM_BMAT }}/{{ background_time | to_fv3time }}.stddev.coupler.res"] +### copy diffusion files +- ["{{ DATA }}/diffusion/diffusion_hz.nc", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}aero_diffusion_hz.nc"] +- ["{{ DATA }}/diffusion/diffusion_vt.nc", "{{ COMOUT_CHEM_BMAT }}/{{ HEAD }}aero_diffusion_vt.nc"] diff --git a/parm/gdas/aero_finalize_variational.yaml.j2 b/parm/gdas/aero_finalize_variational.yaml.j2 new file mode 100644 index 0000000000..7dadd36291 --- /dev/null +++ b/parm/gdas/aero_finalize_variational.yaml.j2 @@ -0,0 +1,24 @@ +###################################### +# set some variables +###################################### +{% if DOIAU == True %} + {% set bkgtime = AERO_WINDOW_BEGIN %} +{% else %} + {% set bkgtime = current_cycle %} +{% endif %} +###################################### +mkdir: +- "{{ 
COMOUT_CHEM_ANALYSIS }}" +- "{{ COMOUT_ATMOS_RESTART }}" +copy: +## copy variational YAML to ROTDIR +- ["{{ DATA }}/{{ APREFIX }}aerovar.yaml", "{{ COMOUT_CHEM_ANALYSIS }}/{{ APREFIX }}aerovar.yaml"] +## copy increments +{% for tile in range(1,ntiles+1) %} +- ["{{ DATA }}/anl/aeroinc.{{ current_cycle | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_CHEM_ANALYSIS }}/aeroinc.{{ current_cycle | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} +- ["{{ DATA }}/anl/aeroinc_gauss.{{ current_cycle | to_isotime }}.gaussian.modelLevels.nc", "{{ COMOUT_CHEM_ANALYSIS }}/{{ APREFIX }}aeroinc.nc"] +## copy analysis +{% for tile in range(1,ntiles+1) %} +- ["{{ DATA }}/anl/{{ bkgtime | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ COMOUT_ATMOS_RESTART }}/{{ bkgtime | to_fv3time }}.aeroanl_fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} diff --git a/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 b/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 new file mode 100644 index 0000000000..9005b9ff12 --- /dev/null +++ b/parm/gdas/aero_stage_bmatrix_bkg.yaml.j2 @@ -0,0 +1,38 @@ +###################################### +# set some variables +###################################### +{% set offset_td = "+6H" | to_timedelta %} +{% set background_time = current_cycle | add_to_datetime(offset_td) %} +{% set ftype_list = ['fv_core.res', 'fv_tracer.res'] %} +###################################### +# create working directories +###################################### +mkdir: +- "{{ DATA }}/bkg" +- "{{ DATA }}/stddev" +- "{{ DATA }}/clm_stddev" +- "{{ DATA }}/diffusion" +copy: +###################################### +# copy deterministic background files +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD, + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':""} %} + +- ["{{ COM_ATMOS_RESTART_TMPL | 
replace_tmpl(tmpl_dict) }}/{{ background_time | to_fv3time }}.coupler.res", "{{ DATA }}/bkg/{{ background_time | to_fv3time }}.coupler.res"] +{% for ftype in ftype_list %} + {% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ background_time | to_fv3time }}.{{ ftype }}.tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ background_time | to_fv3time }}.{{ ftype }}.tile{{ tile }}.nc"] + {% endfor %} +{% endfor %} +# copy climatological stddev files +###################################### +{% for tile in range(1, ntiles+1) %} +- ["{{ BERROR_DATA_DIR }}/stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/clm_stddev/stddev.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} + diff --git a/parm/gdas/aero_stage_variational.yaml.j2 b/parm/gdas/aero_stage_variational.yaml.j2 new file mode 100644 index 0000000000..afd0e1b946 --- /dev/null +++ b/parm/gdas/aero_stage_variational.yaml.j2 @@ -0,0 +1,50 @@ +###################################### +# set some variables +###################################### +{% if DOIAU == True %} + {% set bkg_times = [] %} + {% for fh in range(0, 7, 3) %} + {% set offset = fh | string + "H" %} + {% set fcst_timedelta = offset | to_timedelta %} + {% set fcst_time = AERO_WINDOW_BEGIN | add_to_datetime(fcst_timedelta) %} + {% do bkg_times.append(fcst_time) %} + {% endfor %} +{% else %} + {% set bkg_times = [] %} + {% do bkg_times.append(current_cycle) %} +{% endif %} +{% set fvfiles = ['fv_core.res.', 'fv_tracer.res.'] %} +###################################### +mkdir: +- "{{ DATA }}/anl" +- "{{ DATA }}/diags" +- "{{ DATA }}/berror" +- "{{ DATA }}/bkg" +copy: +###################################### +## copy backgrounds +{% for bkgtime in bkg_times %} +- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkgtime | to_fv3time }}.coupler.res", "{{ DATA }}/bkg/{{ bkgtime | to_fv3time }}.coupler.res"] + {% for fvfile in fvfiles %} + {% for tile in range(1,ntiles+1) %} +- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkgtime | to_fv3time }}.{{ 
fvfile }}tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ bkgtime | to_fv3time }}.{{ fvfile }}tile{{ tile }}.nc"] + {% endfor %} + {% endfor %} +{% endfor %} +###################################### +## copy backgrounds again for fv_tracer to create analysis files later +{% for tile in range(1,ntiles+1) %} +- ["{{ COMIN_ATMOS_RESTART_PREV }}/{{ bkg_times[0] | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/anl/{{ bkg_times[0] | to_fv3time }}.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} + +###################################### +## copy berror files from COMIN_CHEM_BMAT_PREV +## stddev files +{% for tile in range(1, ntiles+1) %} +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ current_cycle | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc", "{{ DATA }}/berror/{{ current_cycle | to_fv3time }}.stddev.fv_tracer.res.tile{{ tile }}.nc"] +{% endfor %} +### copy coupler file +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ current_cycle | to_fv3time }}.stddev.coupler.res", "{{ DATA }}/berror/{{ current_cycle | to_fv3time }}.stddev.coupler.res"] +### copy diffusion files +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ GPREFIX }}aero_diffusion_hz.nc", "{{ DATA }}/berror/diffusion_hz.nc"] +- ["{{ COMIN_CHEM_BMAT_PREV }}/{{ GPREFIX }}aero_diffusion_vt.nc", "{{ DATA }}/berror/diffusion_vt.nc"] diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 new file mode 100644 index 0000000000..a2a5763ab8 --- /dev/null +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -0,0 +1,43 @@ +copy: +###################################### +# copy analyses to directories +###################################### +{% for mem in range(1, NMEM_ENS + 1) %} + # define variables + # Declare a dict of search and replace terms to run on each template + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD , + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':"mem" + '%03d' % mem} %} + + {% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/anl/mem{{ 
'%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% if DOIAU == True %} + # if using IAU, also need analyses copied at the beginning of the window + {% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% endif %} +{% endfor %} +###################################### +# copy ensemble mean increment to COM +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD , + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':"ensstat"} %} + +{% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +{% if DOIAU == True %} + # if using IAU, also need increment copied at the beginning of the window + {% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} +{% endif %} diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 new file mode 100644 index 0000000000..4ad5499751 --- /dev/null +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -0,0 +1,76 @@ +###################################### +# set some variables +###################################### +{% 
if DOIAU == True %} + {% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} +{% else %} + {% set bkg_time = current_cycle | to_fv3time %} +{% endif %} +###################################### +# create working directories +###################################### +mkdir: +- "{{ DATA }}/bkg/det" +- "{{ DATA }}/bkg/det_ensres" +- "{{ DATA }}/inc/det" +- "{{ DATA }}/inc/det_ensres" +- "{{ DATA }}//inc/ensmean" +{% for mem in range(1, NMEM_ENS + 1) %} +- "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" +- "{{ DATA }}/anl/mem{{ '%03d' % mem }}" +{% endfor %} +copy: +###################################### +# copy deterministic background files +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':GDUMP, + '${YMD}':previous_cycle | to_YMD, + '${HH}':previous_cycle | strftime("%H"), + '${MEMDIR}':""} %} + +{% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +###################################### +# copy deterministic increment files +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':GDUMP, + '${YMD}':current_cycle | to_YMD, + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':""} %} + +{% for tile in range(1, ntiles+1) %} +- ["{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +###################################### +# copy ensemble background files +###################################### +{% for mem in range(1, NMEM_ENS + 1) %} + # define variables + # Declare a dict of search and replace terms to run on 
each template + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':previous_cycle | to_YMD, + '${HH}':previous_cycle | strftime("%H"), + '${MEMDIR}':"mem" + '%03d' % mem} %} + + # we need to copy them to two places, one serves as the basis for the analysis + {% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% if DOIAU == True %} + # if using IAU, also need backgrounds copied at the beginning of the window + # we need to copy them to two places, one serves as the basis for the analysis + {% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% endif %} +{% endfor %} diff --git a/parm/gdas/snow_stage_orog.yaml.j2 b/parm/gdas/snow_stage_orog.yaml.j2 new file mode 100644 index 0000000000..3cd7d5c327 --- /dev/null +++ b/parm/gdas/snow_stage_orog.yaml.j2 @@ -0,0 +1,12 @@ +mkdir: +- "{{ DATA }}/orog/det" +- "{{ DATA }}/orog/ens" +copy: +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_mosaic.nc", "{{ DATA }}/orog/det/{{ CASE }}_mosaic.nc"] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_mosaic.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_mosaic.nc"] +{% for tile in range(1, 
ntiles+1) %} +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +{% endfor %} diff --git a/parm/post/upp.yaml b/parm/post/upp.yaml index a39e2be877..41dbb7defb 100644 --- a/parm/post/upp.yaml +++ b/parm/post/upp.yaml @@ -18,7 +18,7 @@ analysis: rdaod: True data_in: copy: - - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-ANL.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-anl.txt", "{{ DATA }}/postxconfig-NT.txt"] - ["{{ COM_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmanl.nc", "{{ DATA }}/{{ atmos_filename }}"] - ["{{ COM_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcanl.nc", "{{ DATA }}/{{ flux_filename }}"] data_out: @@ -32,9 +32,9 @@ forecast: data_in: copy: {% if forecast_hour == 0 %} - - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-F00-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-f00-two.txt", "{{ DATA }}/postxconfig-NT.txt"] {% else %} - - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-two.txt", "{{ DATA }}/postxconfig-NT.txt"] {% endif %} - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"] - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ 
flux_filename }}"] @@ -81,10 +81,10 @@ goes: {% endfor %} - ["{{ 'CRTM_FIX' | getenv }}/AerosolCoeff.bin", "{{ DATA }}/"] - ["{{ 'CRTM_FIX' | getenv }}/CloudCoeff.bin", "{{ DATA }}/"] - - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-GOES.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-goes.txt", "{{ DATA }}/postxconfig-NT.txt"] - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"] - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ flux_filename }}"] data_out: copy: - - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.special.grb2f{{ '%03d' % forecast_hour }}"] - - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.special.grb2if{{ '%03d' % forecast_hour }}"] + - ["{{ DATA }}/GFSGOES.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.special.grb2f{{ '%03d' % forecast_hour }}"] + - ["{{ DATA }}/GFSGOES.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.special.grb2if{{ '%03d' % forecast_hour }}"] diff --git a/parm/product/bufr_ij9km.txt b/parm/product/bufr_ij_gfs_C1152.txt similarity index 100% rename from parm/product/bufr_ij9km.txt rename to parm/product/bufr_ij_gfs_C1152.txt diff --git a/parm/product/bufr_ij13km.txt b/parm/product/bufr_ij_gfs_C768.txt similarity index 100% rename from parm/product/bufr_ij13km.txt rename to parm/product/bufr_ij_gfs_C768.txt diff --git a/parm/product/bufr_ij_gfs_C96.txt b/parm/product/bufr_ij_gfs_C96.txt new file mode 100644 index 0000000000..c005cc3170 --- /dev/null +++ b/parm/product/bufr_ij_gfs_C96.txt @@ -0,0 +1,2115 @@ + 1 235 22 69.58 -140.18 + 2 
247 21 69.90 -128.97 + 3 256 22 69.58 -120.75 + 4 244 26 65.00 -132.00 + 5 256 26 65.00 -120.00 + 6 275 26 65.10 -102.43 + 7 235 32 60.00 -140.00 + 8 256 32 60.00 -120.00 + 9 278 32 60.00 -100.00 + 10 249 35 57.00 -127.00 + 11 226 21 70.20 -148.47 + 12 271 39 53.63 -106.20 + 13 216 24 67.10 -157.85 + 14 272 38 53.99 -105.12 + 15 259 39 52.88 -118.07 + 16 270 29 62.50 -107.00 + 17 267 49 43.90 -110.00 + 18 281 47 45.80 -97.45 + 19 283 49 43.50 -95.60 + 20 286 60 33.22 -92.80 + 21 283 51 42.04 -94.79 + 22 285 51 42.11 -92.92 + 23 293 60 34.10 -86.10 + 24 294 58 35.20 -84.80 + 25 295 59 34.80 -84.05 + 26 298 57 36.72 -80.97 + 27 264 44 48.32 -113.35 + 28 270 51 42.20 -107.20 + 29 273 50 42.76 -104.45 + 30 275 51 42.30 -102.40 + 31 278 50 42.60 -99.90 + 32 271 54 39.10 -106.20 + 33 290 56 37.30 -88.90 + 34 286 53 40.20 -92.60 + 35 280 59 34.90 -98.10 + 36 272 44 48.31 -105.10 + 37 289 59 34.25 -89.87 + 38 272 53 40.13 -105.24 + 39 281 56 37.48 -96.93 + 40 295 58 35.96 -84.29 + 41 277 50 42.86 -100.41 + 42 284 52 41.02 -94.36 + 43 302 51 42.04 -77.76 + 44 300 54 39.17 -79.52 + 45 289 62 31.71 -89.41 + 46 299 48 44.92 -80.42 + 47 274 48 45.27 -103.54 + 48 273 48 44.79 -104.73 + 49 274 48 44.46 -103.85 + 50 273 50 43.35 -104.69 + 51 274 49 43.53 -103.65 + 52 282 47 45.50 -95.90 + 53 285 49 44.10 -93.50 + 54 260 58 35.34 -116.88 + 55 291 59 34.40 -87.60 + 56 255 45 47.50 -121.10 + 57 254 48 44.50 -122.70 + 58 262 49 44.00 -115.00 + 59 255 46 46.90 -121.30 + 60 256 49 44.10 -120.50 + 61 255 50 42.70 -121.40 + 62 266 61 32.42 -110.73 + 63 281 57 36.61 -97.49 + 64 255 47 45.68 -121.27 + 65 255 47 45.72 -121.56 + 66 256 47 45.68 -120.82 + 67 256 47 45.72 -120.21 + 68 255 55 38.06 -121.77 + 69 254 56 37.10 -122.28 + 70 254 56 37.82 -122.47 + 71 254 55 37.94 -122.50 + 72 255 56 37.07 -121.12 + 73 264 53 40.20 -113.30 + 74 306 53 40.42 -73.98 + 75 305 56 37.20 -74.80 + 76 305 57 36.05 -74.25 + 77 302 61 32.80 -77.20 + 78 306 55 38.60 -73.75 + 79 291 65 29.20 -87.25 + 80 293 46 
46.31 -85.46 + 81 291 46 47.18 -87.22 + 82 290 50 42.97 -88.55 + 83 307 48 44.94 -72.51 + 84 297 56 37.27 -82.10 + 85 297 56 90.00 0.00 + 86 217 17 74.30 -156.60 + 87 235 16 75.00 -140.00 + 88 200 31 60.60 -173.30 + 89 212 32 59.90 -161.75 + 90 225 29 62.88 -149.83 + 91 226 31 60.79 -148.83 + 92 225 31 60.49 -149.79 + 93 225 31 60.95 -149.14 + 94 226 31 60.78 -148.72 + 95 263 65 29.22 -114.28 + 96 297 57 36.20 -81.65 + 97 291 50 42.59 -87.94 + 98 259 50 42.59 -117.87 + 99 254 46 47.08 -122.36 + 100 252 46 46.91 -124.11 + 101 288 45 47.66 -90.91 + 102 287 46 46.77 -91.25 + 103 213 37 55.31 -160.52 + 104 302 58 35.33 -77.60 + 105 307 48 44.53 -72.61 + 106 299 51 41.63 -80.21 + 107 298 55 38.69 -80.65 + 108 259 59 34.57 -117.67 + 109 259 59 34.63 -117.61 + 110 293 47 45.97 -86.17 + 111 292 46 46.42 -86.65 + 112 289 54 39.16 -89.67 + 113 276 49 44.05 -101.60 + 114 293 49 43.58 -86.24 + 115 294 50 43.43 -85.30 + 116 294 50 43.37 -84.44 + 117 273 59 34.38 -104.23 + 118 333 46 46.70 -48.00 + 119 326 46 47.20 -55.10 + 120 293 50 42.75 -86.10 + 121 292 51 42.41 -86.28 + 122 259 48 45.35 -117.23 + 123 283 52 41.59 -95.34 + 124 291 49 43.78 -87.85 + 125 275 63 30.90 -102.85 + 126 287 60 33.64 -91.75 + 127 286 57 36.22 -92.28 + 128 274 62 31.38 -103.51 + 129 298 68 26.42 -81.44 + 130 298 67 26.75 -80.94 + 131 279 63 31.18 -99.32 + 132 277 63 30.59 -100.65 + 133 255 54 39.08 -120.94 + 134 256 56 37.51 -120.04 + 135 256 56 37.58 -120.27 + 136 257 57 36.83 -119.33 + 137 255 58 35.85 -121.31 + 138 255 45 47.31 -121.85 + 139 255 55 38.49 -121.22 + 140 256 54 39.13 -120.80 + 141 254 52 40.72 -122.43 + 142 305 53 40.50 -74.45 + 143 310 51 42.99 -70.62 + 144 306 47 45.50 -73.57 + 145 307 53 41.10 -72.89 + 146 254 54 39.69 -121.91 + 147 255 54 39.17 -121.11 + 148 257 58 35.62 -119.69 + 149 255 58 35.66 -121.28 + 150 253 55 38.61 -123.21 + 151 253 55 38.51 -123.22 + 152 253 55 38.78 -122.99 + 153 256 56 37.11 -120.24 + 154 254 53 39.99 -122.06 + 155 253 54 39.00 -123.12 + 156 253 55 
38.51 -122.96 + 157 271 60 33.50 -106.18 + 158 271 60 33.82 -106.65 + 159 265 53 40.58 -111.63 + 160 296 49 43.78 -82.99 + 161 233 34 58.00 -142.00 + 162 232 38 54.00 -143.00 + 163 238 36 56.00 -137.00 + 164 219 37 55.00 -155.00 + 165 236 39 53.00 -139.00 + 166 227 41 51.10 -147.40 + 167 212 40 52.00 -162.00 + 168 221 43 49.50 -153.70 + 169 204 43 49.20 -168.80 + 170 240 43 50.00 -135.00 + 171 247 55 38.50 -129.00 + 172 250 60 34.00 -126.00 + 173 253 64 29.50 -123.00 + 174 256 69 25.00 -120.00 + 175 262 69 25.00 -115.00 + 176 234 49 44.30 -140.80 + 177 231 45 47.70 -144.10 + 178 238 56 37.40 -137.00 + 179 237 46 47.00 -138.00 + 180 240 49 43.60 -135.50 + 181 242 54 39.70 -133.30 + 182 246 44 48.50 -129.50 + 183 248 48 44.60 -127.50 + 184 234 42 50.50 -141.38 + 185 249 53 40.50 -126.98 + 186 182 37 55.00 170.00 + 187 192 37 55.00 180.00 + 188 187 32 60.00 175.00 + 189 267 50 42.80 -109.81 + 190 254 54 39.15 -122.15 + 191 255 52 40.88 -121.66 + 192 256 55 37.99 -120.38 + 193 258 56 37.74 -118.59 + 194 257 57 36.20 -119.10 + 195 258 58 35.97 -118.54 + 196 258 59 34.83 -118.95 + 197 256 57 36.14 -120.35 + 198 258 57 36.65 -118.48 + 199 306 51 42.47 -73.29 + 200 297 54 39.21 -82.23 + 201 266 53 40.48 -111.43 + 202 263 61 33.02 -114.24 + 203 267 62 31.49 -110.30 + 204 269 62 32.02 -107.87 + 205 273 63 30.43 -104.33 + 206 278 66 28.39 -100.29 + 207 279 68 26.57 -98.82 + 208 282 65 28.71 -95.96 + 209 287 64 29.81 -91.66 + 210 298 70 24.70 -81.51 + 211 313 77 17.98 -67.08 + 212 255 47 46.19 -121.70 + 213 254 47 46.28 -122.28 + 214 254 48 45.35 -121.94 + 215 254 49 44.17 -122.06 + 216 279 48 45.03 -99.11 + 217 281 47 45.46 -96.99 + 218 283 55 38.28 -95.22 + 219 281 53 39.86 -96.63 + 220 271 49 44.38 -106.72 + 221 269 48 44.52 -108.08 + 222 288 48 45.10 -90.30 + 223 286 48 44.46 -92.29 + 224 270 56 37.29 -107.06 + 225 269 55 38.23 -108.56 + 226 269 53 40.05 -107.89 + 227 287 51 41.64 -91.54 + 228 272 54 39.05 -105.51 + 229 271 53 40.05 -106.36 + 230 291 55 38.76 -87.61 + 231 
290 54 39.07 -88.53 + 232 288 52 40.94 -90.43 + 233 288 53 39.77 -90.24 + 234 290 54 39.48 -88.28 + 235 278 50 43.39 -99.84 + 236 275 50 43.02 -102.52 + 237 273 49 43.89 -104.32 + 238 275 48 44.56 -102.66 + 239 272 49 43.74 -105.74 + 240 289 47 46.15 -89.21 + 241 291 46 46.54 -87.39 + 242 293 46 46.68 -85.97 + 243 290 46 46.61 -88.91 + 244 292 66 28.00 -87.00 + 245 285 66 28.20 -93.70 + 246 286 66 28.50 -92.30 + 247 287 66 27.90 -91.00 + 248 290 66 28.00 -89.00 + 249 294 66 28.00 -85.00 + 250 300 63 31.00 -79.00 + 251 316 45 48.00 -65.00 + 252 266 54 39.30 -111.46 + 253 273 57 36.74 -104.65 + 254 302 56 36.99 -77.00 + 255 302 56 37.86 -76.89 + 256 303 55 38.54 -76.03 + 257 302 57 36.77 -77.79 + 258 312 49 43.78 -68.86 + 259 299 55 38.40 -80.00 + 260 259 58 35.10 -117.56 + 261 274 50 43.37 -103.39 + 262 274 49 44.41 -103.48 + 263 290 51 42.21 -88.32 + 264 291 53 40.46 -88.10 + 265 299 61 32.78 -79.92 + 266 299 61 32.66 -79.93 + 267 298 62 32.03 -80.89 + 268 259 45 47.97 -117.43 + 269 298 57 36.22 -81.10 + 270 297 52 42.47 -82.76 + 271 294 48 43.80 -83.72 + 272 296 49 44.02 -82.79 + 273 292 51 41.69 -87.15 + 274 291 52 41.61 -88.10 + 275 299 57 36.46 -80.55 + 276 264 56 37.20 -112.99 + 277 266 55 38.29 -111.26 + 278 265 55 38.74 -112.10 + 279 258 56 37.20 -118.80 + 280 274 45 47.61 -103.26 + 281 281 56 37.80 -97.01 + 282 284 59 34.60 -94.30 + 283 283 55 37.90 -95.20 + 284 276 55 38.50 -101.50 + 285 272 56 37.40 -105.20 + 286 272 57 36.00 -105.30 + 287 276 59 34.20 -101.70 + 288 279 60 33.60 -99.30 + 289 281 60 33.70 -97.40 + 290 286 56 37.10 -92.30 + 291 270 52 41.00 -107.00 + 292 263 47 46.25 -114.15 + 293 264 46 47.00 -112.50 + 294 263 44 49.00 -114.00 + 295 269 43 49.50 -108.00 + 296 274 44 49.00 -104.00 + 297 279 45 47.50 -99.00 + 298 280 47 46.20 -97.50 + 299 283 47 46.00 -95.00 + 300 295 52 40.80 -84.20 + 301 299 55 38.00 -80.00 + 302 296 59 34.50 -82.50 + 303 280 51 41.73 -98.01 + 304 283 52 41.58 -95.34 + 305 281 51 42.24 -96.98 + 306 279 56 37.70 -98.75 + 
307 280 44 48.75 -98.39 + 308 281 44 48.75 -96.94 + 309 282 48 44.46 -96.25 + 310 283 50 43.17 -95.21 + 311 278 50 43.22 -99.40 + 312 279 50 43.26 -98.76 + 313 280 52 40.90 -97.62 + 314 278 52 40.79 -99.78 + 315 280 53 40.15 -97.58 + 316 279 54 39.73 -99.32 + 317 288 57 36.77 -90.32 + 318 291 56 37.36 -87.40 + 319 293 54 39.05 -85.61 + 320 296 56 37.75 -82.64 + 321 295 57 36.61 -83.74 + 322 294 57 36.86 -84.86 + 323 295 55 38.06 -83.98 + 324 276 52 41.12 -101.77 + 325 277 51 41.96 -100.57 + 326 267 49 43.55 -109.69 + 327 266 50 42.71 -110.94 + 328 267 51 41.82 -110.56 + 329 268 48 44.87 -108.79 + 330 269 48 44.91 -108.45 + 331 271 49 43.71 -106.63 + 332 289 49 43.52 -89.77 + 333 290 49 43.77 -88.49 + 334 288 50 42.89 -90.24 + 335 290 50 43.04 -88.24 + 336 289 49 44.04 -89.31 + 337 289 52 41.35 -89.15 + 338 292 51 41.70 -86.82 + 339 291 52 41.07 -87.85 + 340 289 51 41.89 -89.08 + 341 287 47 45.42 -91.77 + 342 284 48 45.10 -94.51 + 343 282 48 44.73 -96.27 + 344 275 53 40.10 -102.24 + 345 275 55 38.76 -102.79 + 346 292 48 45.29 -86.98 + 347 289 49 44.36 -89.84 + 348 305 53 39.99 -74.17 + 349 293 56 37.70 -85.87 + 350 280 56 37.67 -98.12 + 351 280 56 37.28 -98.04 + 352 282 55 38.30 -95.72 + 353 282 56 37.85 -96.29 + 354 280 55 38.75 -98.23 + 355 307 54 39.55 -73.90 + 356 308 54 39.70 -71.60 + 357 283 56 37.09 -95.57 + 358 280 55 38.35 -97.69 + 359 279 56 37.35 -99.35 + 360 279 58 35.30 -98.90 + 361 281 58 35.50 -97.00 + 362 283 59 34.98 -94.69 + 363 276 57 36.60 -101.60 + 364 301 51 42.50 -78.68 + 365 304 49 43.76 -75.68 + 366 287 61 32.35 -91.03 + 367 298 68 25.86 -81.38 + 368 297 65 28.84 -82.33 + 369 296 62 32.07 -82.90 + 370 291 62 31.71 -87.78 + 371 290 60 33.45 -88.82 + 372 287 59 34.89 -91.20 + 373 293 58 35.48 -86.09 + 374 291 58 35.62 -87.84 + 375 294 56 36.95 -85.26 + 376 275 60 33.62 -103.02 + 377 278 58 35.21 -100.25 + 378 277 54 39.13 -100.87 + 379 287 56 37.01 -91.36 + 380 284 55 38.37 -93.79 + 381 286 54 39.42 -92.44 + 382 300 59 34.61 -79.06 + 383 309 
51 41.65 -70.52 + 384 298 60 33.46 -80.85 + 385 287 47 45.50 -91.00 + 386 285 47 45.89 -93.27 + 387 286 48 45.15 -92.54 + 388 282 48 45.23 -96.00 + 389 288 49 44.03 -90.08 + 390 254 52 41.32 -122.32 + 391 257 50 42.55 -119.66 + 392 256 50 43.33 -120.84 + 393 253 52 41.39 -123.49 + 394 255 52 41.43 -121.46 + 395 259 47 45.36 -117.25 + 396 258 49 44.40 -118.96 + 397 302 57 36.33 -77.64 + 398 299 59 34.89 -79.76 + 399 298 53 40.47 -81.42 + 400 300 52 40.82 -79.53 + 401 300 53 40.63 -79.11 + 402 300 54 39.58 -79.34 + 403 299 53 40.14 -80.29 + 404 304 51 42.46 -75.06 + 405 276 57 36.68 -101.50 + 406 292 61 33.17 -86.77 + 407 289 63 31.27 -89.26 + 408 298 66 28.29 -81.44 + 409 297 65 28.82 -81.81 + 410 283 64 30.36 -95.41 + 411 283 63 30.73 -95.47 + 412 276 48 45.02 -102.02 + 413 278 65 29.21 -99.74 + 414 280 58 35.87 -98.42 + 415 290 52 40.92 -88.62 + 416 307 52 41.56 -73.05 + 417 260 44 48.69 -116.32 + 418 258 44 48.65 -118.73 + 419 256 45 47.76 -120.65 + 420 256 44 48.49 -120.24 + 421 261 45 47.54 -116.14 + 422 253 52 40.73 -122.94 + 423 253 52 40.94 -123.63 + 424 253 53 40.34 -123.07 + 425 253 53 39.75 -123.21 + 426 285 57 36.54 -93.20 + 427 203 28 63.68 -170.50 + 428 223 23 68.13 -151.73 + 429 201 28 63.77 -171.73 + 430 207 31 60.37 -166.27 + 431 212 35 56.65 -161.37 + 432 171 38 54.05 159.43 + 433 307 48 44.89 -72.23 + 434 276 50 43.46 -101.50 + 435 274 49 43.99 -103.79 + 436 295 65 29.30 -84.04 + 437 282 66 27.90 -96.64 + 438 294 59 34.31 -84.42 + 439 295 59 34.27 -83.83 + 440 294 52 41.34 -84.43 + 441 293 52 41.28 -85.84 + 442 293 51 41.81 -85.44 + 443 292 52 41.57 -86.73 + 444 274 44 48.93 -103.30 + 445 278 44 48.88 -99.62 + 446 274 47 46.19 -103.43 + 447 279 47 46.02 -99.35 + 448 280 47 46.17 -98.07 + 449 295 47 46.01 -83.74 + 450 289 46 46.88 -89.32 + 451 292 46 45.58 -87.00 + 452 259 59 34.36 -117.63 + 453 257 59 34.94 -119.69 + 454 256 59 34.48 -120.23 + 455 256 59 34.61 -120.08 + 456 266 49 43.74 -111.10 + 457 295 59 34.85 -84.00 + 458 274 49 44.35 -103.77 
+ 459 273 49 44.41 -104.36 + 460 283 56 37.80 -94.77 + 461 286 56 37.64 -92.65 + 462 285 55 38.35 -93.34 + 463 255 54 39.49 -121.61 + 464 290 60 33.90 -88.33 + 465 274 59 34.64 -103.63 + 466 304 54 38.83 -75.43 + 467 279 61 32.54 -99.25 + 468 284 54 38.81 -94.26 + 469 280 57 36.34 -97.92 + 470 279 59 34.36 -98.98 + 471 293 52 40.72 -85.93 + 472 281 63 30.72 -97.38 + 473 279 53 40.32 -98.44 + 474 293 62 31.46 -85.46 + 475 293 61 32.54 -85.79 + 476 276 44 48.50 -101.40 + 477 296 57 36.17 -83.40 + 478 304 50 43.47 -75.46 + 479 296 61 32.68 -83.35 + 480 291 57 36.74 -87.29 + 481 263 63 31.40 -114.49 + 482 264 61 32.37 -112.87 + 483 268 61 32.82 -109.68 + 484 261 44 48.39 -115.55 + 485 273 44 48.76 -104.52 + 486 253 56 37.70 -123.00 + 487 253 55 38.32 -123.07 + 488 273 46 46.37 -104.28 + 489 266 48 44.42 -111.37 + 490 265 51 42.17 -112.28 + 491 261 54 39.50 -115.95 + 492 257 55 38.30 -119.16 + 493 306 48 44.65 -73.49 + 494 304 52 41.14 -75.38 + 495 304 53 39.98 -75.82 + 496 275 56 37.28 -102.61 + 497 310 47 45.64 -70.26 + 498 311 45 47.46 -69.22 + 499 313 47 45.56 -67.43 + 500 306 51 42.05 -73.20 + 501 268 51 42.11 -109.45 + 502 269 51 42.49 -107.83 + 503 268 51 42.48 -108.84 + 504 267 50 43.20 -110.40 + 505 282 53 40.61 -95.87 + 506 281 52 41.24 -96.59 + 507 292 52 41.45 -87.01 + 508 290 52 41.42 -88.41 + 509 279 53 39.76 -98.79 + 510 271 53 40.51 -106.87 + 511 270 54 39.43 -107.38 + 512 271 54 39.48 -106.15 + 513 270 53 40.50 -107.52 + 514 289 52 41.02 -89.39 + 515 290 55 38.72 -88.18 + 516 280 54 39.06 -98.17 + 517 282 56 37.13 -96.19 + 518 282 55 38.37 -96.54 + 519 302 50 42.64 -77.05 + 520 303 51 41.77 -76.45 + 521 271 52 40.73 -106.28 + 522 303 50 43.45 -76.51 + 523 284 50 43.08 -94.27 + 524 283 50 43.40 -94.75 + 525 283 53 40.35 -94.92 + 526 295 55 38.22 -83.59 + 527 285 55 38.71 -93.18 + 528 285 54 39.42 -93.13 + 529 269 49 43.71 -108.39 + 530 266 50 43.18 -111.04 + 531 274 47 45.59 -103.55 + 532 222 31 60.82 -152.72 + 533 214 22 69.00 -160.00 + 534 190 40 52.00 
177.55 + 535 207 35 57.00 -166.00 + 536 221 29 62.22 -153.08 + 537 217 32 59.73 -157.26 + 538 227 30 61.89 -147.32 + 539 207 25 66.27 -166.05 + 540 259 60 34.10 -117.23 + 541 284 53 40.63 -93.90 + 542 266 60 33.40 -110.77 + 543 264 60 33.97 -112.74 + 544 303 55 38.80 -76.07 + 545 305 52 41.05 -74.63 + 546 231 29 62.72 -143.97 + 547 231 24 67.75 -144.11 + 548 231 26 65.59 -144.36 + 549 223 30 61.95 -151.00 + 550 220 35 57.27 -154.56 + 551 227 25 66.15 -148.03 + 552 232 24 67.03 -143.29 + 553 213 26 65.20 -161.15 + 554 223 32 59.75 -151.37 + 555 215 28 63.39 -158.83 + 556 242 35 56.97 -134.00 + 557 211 23 67.95 -162.31 + 558 224 31 60.59 -150.32 + 559 225 31 60.37 -149.41 + 560 232 30 61.32 -142.59 + 561 223 32 60.03 -151.66 + 562 215 23 68.07 -158.71 + 563 214 30 61.58 -159.54 + 564 229 29 63.03 -145.49 + 565 209 26 65.41 -164.66 + 566 221 28 63.44 -153.36 + 567 240 33 59.25 -135.52 + 568 243 37 55.58 -133.10 + 569 224 28 63.49 -150.88 + 570 232 29 62.97 -143.34 + 571 233 28 64.05 -141.93 + 572 220 25 66.85 -154.34 + 573 222 25 66.08 -152.17 + 574 218 30 61.64 -156.44 + 575 218 22 69.50 -156.50 + 576 226 22 69.00 -149.00 + 577 226 24 67.75 -149.00 + 578 218 26 65.34 -155.95 + 579 219 27 64.10 -155.56 + 580 223 27 64.31 -151.08 + 581 214 31 60.32 -160.20 + 582 210 30 62.10 -163.80 + 583 212 21 70.40 -161.90 + 584 217 20 71.32 -156.62 + 585 312 51 42.35 -67.70 + 586 308 54 39.30 -72.00 + 587 314 54 39.30 -65.70 + 588 310 56 37.30 -70.10 + 589 305 57 36.30 -74.60 + 590 306 58 35.60 -73.80 + 591 309 59 34.90 -70.80 + 592 303 60 33.80 -76.00 + 593 307 61 32.80 -72.30 + 594 245 45 48.10 -130.50 + 595 249 45 48.10 -126.60 + 596 246 48 45.30 -129.70 + 597 251 48 45.30 -125.60 + 598 246 51 41.75 -129.90 + 599 250 51 41.90 -125.80 + 600 251 54 39.20 -125.50 + 601 251 57 36.40 -125.40 + 602 251 60 33.30 -125.00 + 603 254 59 34.60 -122.30 + 604 255 63 30.90 -121.50 + 605 260 64 29.60 -117.00 + 606 304 56 37.54 -76.01 + 607 270 61 32.99 -106.97 + 608 267 62 32.15 -109.84 + 609 
295 50 42.63 -83.98 + 610 295 50 42.99 -84.14 + 611 296 51 42.10 -83.16 + 612 295 51 41.87 -84.07 + 613 294 48 44.02 -83.54 + 614 295 50 43.46 -83.45 + 615 296 51 41.94 -83.43 + 616 274 53 40.61 -103.26 + 617 274 53 40.34 -103.80 + 618 285 53 40.08 -93.59 + 619 285 53 40.48 -93.01 + 620 284 53 40.25 -94.33 + 621 285 52 40.68 -92.90 + 622 286 51 41.71 -92.73 + 623 285 51 42.47 -93.27 + 624 290 55 38.66 -88.45 + 625 291 54 39.02 -87.65 + 626 289 54 39.53 -89.33 + 627 268 50 43.31 -109.19 + 628 269 51 41.67 -107.98 + 629 266 49 43.50 -110.96 + 630 280 54 39.47 -98.13 + 631 284 56 37.85 -94.31 + 632 290 51 41.93 -88.71 + 633 267 56 37.44 -110.56 + 634 271 53 40.45 -106.75 + 635 281 56 37.32 -97.39 + 636 271 56 37.45 -106.80 + 637 270 49 44.03 -107.45 + 638 266 51 42.08 -110.96 + 639 269 55 38.79 -108.06 + 640 271 53 40.35 -106.70 + 641 290 65 29.30 -88.84 + 642 376 62 32.13 -7.88 + 643 307 51 41.89 -72.71 + 644 12 32 60.20 11.10 + 645 20 32 59.67 17.93 + 646 20 33 59.35 17.95 + 647 27 31 60.32 24.97 + 648 383 32 60.13 -1.18 + 649 380 36 56.50 -4.58 + 650 324 131 -33.00 -57.00 + 651 383 41 51.68 -1.78 + 652 383 41 51.75 -1.58 + 653 383 41 51.29 -0.27 + 654 378 40 52.25 -6.33 + 655 360 28 63.97 -22.60 + 656 377 30 62.02 -6.76 + 657 14 36 55.77 12.53 + 658 5 40 52.03 4.35 + 659 5 42 50.90 4.47 + 660 8 46 46.82 6.95 + 661 3 44 48.73 2.40 + 662 376 50 43.37 -8.42 + 663 383 51 41.67 -1.02 + 664 379 56 37.17 -5.62 + 665 360 78 16.75 -22.95 + 666 16 24 67.27 14.37 + 667 194 20 70.97 -178.53 + 668 193 22 68.92 -179.48 + 669 203 25 66.17 -169.83 + 670 182 27 64.68 170.42 + 671 190 27 64.73 177.50 + 672 192 29 63.05 179.32 + 673 68 39 53.21 63.55 + 674 378 34 58.22 -6.32 + 675 2 40 52.63 1.32 + 676 72 42 50.22 66.83 + 677 79 43 49.80 73.15 + 678 379 42 50.08 -5.25 + 679 378 38 54.65 -6.22 + 680 307 59 34.90 -73.00 + 681 304 61 32.30 -75.20 + 682 301 61 32.50 -79.07 + 683 302 65 29.30 -77.40 + 684 299 62 31.40 -80.87 + 685 301 65 28.90 -78.50 + 686 301 61 32.80 -79.62 + 687 299 62 
32.28 -80.41 + 688 289 68 25.90 -89.70 + 689 285 68 25.90 -93.60 + 690 293 68 25.90 -85.90 + 691 283 66 27.90 -95.00 + 692 282 67 27.00 -96.50 + 693 309 55 38.50 -70.70 + 694 311 50 42.60 -68.60 + 695 311 50 43.53 -70.14 + 696 310 53 40.50 -69.40 + 697 306 55 38.50 -74.70 + 698 313 52 41.10 -66.60 + 699 305 57 36.60 -74.80 + 700 308 53 40.70 -72.10 + 701 307 53 40.30 -73.20 + 702 314 49 44.30 -67.30 + 703 307 53 40.37 -73.70 + 704 304 56 37.76 -75.33 + 705 291 45 48.06 -87.78 + 706 293 48 45.33 -86.42 + 707 297 48 45.35 -82.84 + 708 292 45 47.56 -86.55 + 709 297 52 41.68 -82.40 + 710 290 45 47.32 -89.87 + 711 292 50 42.67 -87.02 + 712 297 49 44.28 -82.42 + 713 303 50 43.62 -77.41 + 714 297 52 42.47 -81.22 + 715 301 50 43.40 -79.45 + 716 301 50 42.74 -79.35 + 717 292 51 42.14 -87.66 + 718 226 36 56.30 -148.20 + 719 245 51 42.50 -130.50 + 720 218 40 51.90 -155.90 + 721 245 47 46.10 -131.00 + 722 238 52 40.90 -137.50 + 723 252 54 39.20 -124.00 + 724 251 50 42.75 -124.82 + 725 252 47 46.20 -124.20 + 726 252 53 40.40 -124.50 + 727 195 35 57.00 -177.70 + 728 257 61 32.40 -119.50 + 729 259 61 32.49 -118.03 + 730 106 76 18.77 98.96 + 731 110 78 16.47 102.78 + 732 107 79 15.77 100.14 + 733 112 80 15.25 104.87 + 734 108 81 13.67 100.61 + 735 108 88 7.19 100.61 + 736 211 71 23.40 -162.30 + 737 216 78 17.20 -157.80 + 738 213 75 19.30 -160.80 + 739 222 77 17.40 -152.50 + 740 10 37 55.52 8.55 + 741 6 40 52.31 4.76 + 742 8 40 52.28 6.89 + 743 223 32 59.77 -151.17 + 744 7 44 48.98 6.25 + 745 280 64 29.70 -98.01 + 746 292 57 36.25 -86.57 + 747 283 57 36.18 -95.56 + 748 258 58 35.24 -119.03 + 749 288 55 38.62 -90.18 + 750 273 55 38.46 -104.18 + 751 297 52 41.50 -81.60 + 752 264 50 43.11 -112.68 + 753 261 45 47.47 -115.80 + 754 3 52 41.28 2.07 + 755 380 53 40.42 -4.25 + 756 380 53 39.50 -0.47 + 757 277 57 36.50 -100.80 + 758 276 64 30.30 -101.70 + 759 275 66 27.70 -102.50 + 760 279 63 30.50 -99.10 + 761 279 66 28.40 -98.70 + 762 279 68 26.30 -98.80 + 763 281 70 24.40 -97.40 + 764 283 
68 25.90 -95.30 + 765 284 70 24.50 -94.40 + 766 283 71 23.30 -95.60 + 767 284 76 18.30 -94.20 + 768 285 73 21.40 -93.10 + 769 287 71 23.30 -91.60 + 770 290 71 23.70 -88.70 + 771 294 71 23.00 -85.00 + 772 280 71 23.50 -98.00 + 773 287 47 46.02 -91.45 + 774 296 54 38.83 -82.80 + 775 301 54 38.88 -78.52 + 776 300 54 39.62 -78.76 + 777 302 54 39.61 -77.01 + 778 10 39 53.05 8.79 + 779 15 40 52.47 13.40 + 780 8 42 50.87 7.15 + 781 10 42 50.05 8.58 + 782 10 44 48.68 9.22 + 783 13 44 48.35 11.78 + 784 16 44 48.23 14.19 + 785 18 45 48.12 16.57 + 786 16 42 50.10 14.26 + 787 20 38 54.38 18.47 + 788 23 40 52.17 20.97 + 789 19 41 51.10 16.89 + 790 21 45 47.43 19.18 + 791 22 48 44.78 20.53 + 792 25 50 42.69 23.41 + 793 30 50 42.57 27.52 + 794 30 50 43.23 27.83 + 795 27 51 42.07 24.86 + 796 28 48 44.57 26.09 + 797 14 47 45.50 12.33 + 798 14 51 41.80 12.60 + 799 14 51 41.80 12.23 + 800 26 58 35.53 24.15 + 801 32 52 40.97 29.08 + 802 36 53 40.13 33.00 + 803 43 56 37.75 40.20 + 804 36 58 35.15 33.28 + 805 200 27 64.43 -173.23 + 806 33 32 59.58 30.18 + 807 41 36 55.75 37.57 + 808 89 37 55.03 82.90 + 809 141 50 43.12 131.90 + 810 83 50 43.23 76.93 + 811 48 51 41.68 44.95 + 812 80 50 43.07 74.47 + 813 74 52 41.27 69.27 + 814 39 62 31.98 35.98 + 815 38 62 31.87 35.22 + 816 50 69 24.88 46.77 + 817 50 70 24.72 46.72 + 818 52 65 29.22 47.98 + 819 47 57 36.32 43.15 + 820 48 60 33.22 44.23 + 821 51 63 30.57 47.78 + 822 53 66 28.00 49.00 + 823 54 67 27.00 50.00 + 824 55 58 35.68 51.32 + 825 66 63 31.05 61.47 + 826 76 59 34.42 70.47 + 827 71 63 31.31 65.85 + 828 56 69 25.25 51.57 + 829 60 69 25.25 55.33 + 830 59 70 24.42 54.65 + 831 311 14 76.53 -68.75 + 832 83 65 28.58 77.20 + 833 89 77 17.72 83.30 + 834 115 50 43.20 107.17 + 835 92 66 27.70 85.37 + 836 122 72 22.32 114.17 + 837 130 69 25.03 121.52 + 838 124 85 10.72 115.83 + 839 136 56 37.55 126.80 + 840 138 58 35.18 128.93 + 841 151 52 40.70 141.37 + 842 147 58 35.25 136.93 + 843 150 58 35.55 139.78 + 844 150 58 35.76 140.38 + 845 145 59 
34.68 135.53 + 846 107 90 5.30 100.27 + 847 109 93 2.75 101.72 + 848 111 95 1.38 103.72 + 849 112 72 22.82 104.97 + 850 113 74 21.02 105.80 + 851 114 84 10.82 106.67 + 852 136 41 51.72 126.65 + 853 125 53 39.80 116.47 + 854 129 57 36.07 120.33 + 855 111 63 30.67 104.02 + 856 117 59 34.30 108.93 + 857 122 59 34.52 113.83 + 858 130 63 31.17 121.43 + 859 368 66 27.93 -15.38 + 860 376 60 33.57 -7.67 + 861 376 62 31.62 -8.03 + 862 6 72 22.82 5.47 + 863 3 82 13.48 2.17 + 864 381 78 16.72 -3.00 + 865 366 80 14.73 -17.50 + 866 367 82 13.35 -16.80 + 867 78 104 -7.30 72.42 + 868 25 70 24.22 23.30 + 869 30 62 31.33 27.22 + 870 40 97 -1.28 36.83 + 871 40 100 -3.42 37.07 + 872 41 101 -4.92 38.23 + 873 17 101 -4.38 15.45 + 874 4 89 6.58 3.33 + 875 4 89 5.60 -0.17 + 876 376 88 7.38 -7.53 + 877 380 90 5.25 -3.93 + 878 373 89 6.23 -10.37 + 879 15 105 -8.85 13.23 + 880 34 122 -24.37 31.05 + 881 20 132 -33.97 18.60 + 882 217 20 71.30 -156.78 + 883 231 21 70.13 -143.63 + 884 207 22 68.88 -166.13 + 885 211 24 66.87 -162.63 + 886 222 22 69.37 -152.13 + 887 223 24 66.92 -151.52 + 888 222 26 65.17 -152.10 + 889 230 25 66.57 -145.27 + 890 208 27 64.50 -165.43 + 891 213 28 63.88 -160.80 + 892 207 30 61.78 -166.03 + 893 212 31 60.78 -161.80 + 894 217 27 64.73 -156.93 + 895 219 29 62.97 -155.62 + 896 218 29 62.90 -155.98 + 897 219 31 61.10 -155.58 + 898 224 29 62.30 -150.10 + 899 223 30 61.97 -151.18 + 900 223 31 60.57 -151.25 + 901 227 27 64.82 -147.87 + 902 228 27 64.67 -147.10 + 903 229 28 63.97 -145.70 + 904 229 28 64.00 -145.73 + 905 229 30 62.15 -145.45 + 906 225 30 61.25 -149.80 + 907 224 31 61.17 -150.02 + 908 225 30 61.60 -149.08 + 909 228 32 61.13 -146.35 + 910 225 32 60.12 -149.45 + 911 234 27 64.78 -141.15 + 912 233 29 62.97 -141.93 + 913 229 31 60.50 -145.50 + 914 212 33 58.65 -162.07 + 915 203 35 57.15 -170.22 + 916 211 37 55.20 -162.73 + 917 215 33 59.05 -158.52 + 918 217 33 58.68 -156.65 + 919 219 32 59.75 -154.92 + 920 223 32 59.63 -151.50 + 921 228 32 59.43 -146.33 + 922 222 
34 57.75 -152.50 + 923 236 32 59.52 -139.67 + 924 240 32 59.47 -135.30 + 925 240 34 58.42 -135.73 + 926 240 35 57.07 -135.35 + 927 241 34 58.37 -134.58 + 928 243 35 56.82 -132.97 + 929 243 36 56.48 -132.37 + 930 244 37 55.35 -131.70 + 931 244 37 55.03 -131.57 + 932 186 40 52.72 174.12 + 933 196 41 51.88 -176.65 + 934 204 39 52.95 -168.85 + 935 207 38 53.90 -166.55 + 936 245 40 51.93 -131.02 + 937 249 26 65.28 -126.75 + 938 251 19 72.00 -125.28 + 939 259 36 56.23 -117.43 + 940 257 14 76.23 -119.33 + 941 277 35 56.87 -101.08 + 942 280 36 55.75 -97.87 + 943 298 22 68.78 -81.25 + 944 318 8 82.50 -62.33 + 945 254 44 49.03 -122.37 + 946 249 42 50.68 -127.37 + 947 263 39 53.55 -114.10 + 948 263 39 53.30 -113.58 + 949 269 38 54.13 -108.52 + 950 266 42 50.27 -111.18 + 951 274 39 53.33 -104.00 + 952 308 46 46.90 -71.50 + 953 321 45 47.57 -59.17 + 954 257 43 49.95 -119.40 + 955 246 34 58.42 -130.00 + 956 299 50 43.17 -79.93 + 957 299 50 43.47 -80.38 + 958 317 48 44.88 -63.50 + 959 315 48 44.98 -64.92 + 960 315 49 43.72 -65.25 + 961 266 26 65.77 -111.25 + 962 296 51 42.27 -82.97 + 963 320 49 43.93 -60.02 + 964 317 48 44.63 -63.50 + 965 314 49 43.87 -66.10 + 966 314 48 45.32 -65.88 + 967 298 50 43.03 -81.15 + 968 300 49 43.67 -79.63 + 969 302 47 45.95 -77.32 + 970 306 47 45.47 -73.75 + 971 306 47 45.68 -74.03 + 972 304 48 45.32 -75.67 + 973 301 49 44.23 -78.37 + 974 300 48 44.97 -79.30 + 975 298 48 44.75 -81.10 + 976 314 47 45.83 -66.43 + 977 316 47 46.12 -64.68 + 978 317 47 46.28 -63.13 + 979 320 47 46.17 -60.05 + 980 308 46 46.80 -71.40 + 981 303 46 46.38 -75.97 + 982 302 45 48.05 -77.78 + 983 300 46 46.37 -79.42 + 984 294 45 47.97 -84.78 + 985 298 44 48.57 -81.37 + 986 291 40 52.23 -87.88 + 987 289 44 48.37 -89.32 + 988 253 44 48.65 -123.43 + 989 328 45 47.62 -52.73 + 990 326 44 48.95 -54.57 + 991 323 43 49.22 -57.40 + 992 314 42 50.22 -66.27 + 993 322 44 48.53 -58.55 + 994 320 39 53.32 -60.42 + 995 305 43 49.77 -74.53 + 996 306 39 53.75 -73.67 + 997 292 43 49.78 -86.93 + 
998 298 41 51.27 -80.65 + 999 299 41 51.28 -80.60 + 1000 288 41 51.45 -90.20 + 1001 289 38 53.83 -89.87 + 1002 284 43 49.78 -94.37 + 1003 281 43 49.90 -97.23 + 1004 278 43 49.78 -99.65 + 1005 276 38 54.68 -101.68 + 1006 273 42 50.43 -104.67 + 1007 271 40 52.17 -106.68 + 1008 277 38 53.97 -101.10 + 1009 272 39 53.22 -105.68 + 1010 266 43 50.02 -110.72 + 1011 264 43 49.63 -112.80 + 1012 263 41 51.12 -114.02 + 1013 261 43 49.62 -115.78 + 1014 259 43 49.30 -117.63 + 1015 253 43 49.18 -123.17 + 1016 251 43 49.72 -124.90 + 1017 254 38 53.88 -122.68 + 1018 312 34 58.10 -68.42 + 1019 301 33 58.45 -78.12 + 1020 311 28 63.75 -68.53 + 1021 284 33 58.75 -94.07 + 1022 296 27 64.20 -83.37 + 1023 293 10 79.98 -85.93 + 1024 270 35 57.35 -107.13 + 1025 283 16 74.72 -94.95 + 1026 272 22 69.10 -105.12 + 1027 282 27 64.30 -96.00 + 1028 265 37 54.77 -112.02 + 1029 265 32 60.02 -111.95 + 1030 262 29 62.50 -114.40 + 1031 254 33 58.83 -122.58 + 1032 255 30 61.80 -121.20 + 1033 247 32 60.12 -128.82 + 1034 242 23 68.32 -133.53 + 1035 240 31 60.72 -135.07 + 1036 236 28 64.05 -139.13 + 1037 235 24 67.57 -139.82 + 1038 297 70 24.55 -81.75 + 1039 298 70 24.73 -81.05 + 1040 299 68 25.82 -80.28 + 1041 299 68 25.90 -80.28 + 1042 299 68 26.07 -80.15 + 1043 299 69 25.65 -80.43 + 1044 299 67 26.68 -80.12 + 1045 299 68 26.20 -80.17 + 1046 298 66 28.10 -80.65 + 1047 299 66 27.65 -80.42 + 1048 298 66 28.43 -81.32 + 1049 298 65 29.18 -81.05 + 1050 297 63 30.50 -81.69 + 1051 297 64 30.22 -81.88 + 1052 298 62 32.13 -81.19 + 1053 298 62 32.22 -80.70 + 1054 299 61 32.90 -80.03 + 1055 297 66 27.70 -82.38 + 1056 299 67 27.50 -80.37 + 1057 297 68 26.58 -81.87 + 1058 297 68 26.53 -81.75 + 1059 296 66 27.97 -82.53 + 1060 296 67 27.40 -82.55 + 1061 296 66 27.92 -82.68 + 1062 297 66 27.99 -82.02 + 1063 296 64 29.62 -83.10 + 1064 297 63 31.25 -82.40 + 1065 298 63 31.25 -81.47 + 1066 298 63 31.15 -81.37 + 1067 295 64 30.38 -84.37 + 1068 297 64 29.68 -82.27 + 1069 294 60 33.36 -84.57 + 1070 295 62 31.53 -84.18 + 1071 
296 63 30.78 -83.28 + 1072 295 61 32.70 -83.65 + 1073 297 60 33.37 -81.97 + 1074 294 60 33.65 -84.42 + 1075 294 60 33.78 -84.52 + 1076 295 60 33.88 -84.30 + 1077 294 64 29.73 -84.98 + 1078 293 63 30.56 -85.92 + 1079 292 63 30.47 -87.18 + 1080 290 63 30.68 -88.25 + 1081 291 63 30.63 -88.07 + 1082 293 64 30.22 -85.68 + 1083 294 61 32.33 -84.83 + 1084 294 61 32.52 -84.93 + 1085 292 61 32.30 -86.40 + 1086 293 63 31.32 -85.45 + 1087 292 60 33.57 -86.75 + 1088 291 60 33.22 -87.62 + 1089 293 60 33.58 -85.85 + 1090 291 61 32.90 -87.25 + 1091 285 64 29.78 -93.30 + 1092 292 61 33.17 -86.77 + 1093 288 65 29.10 -90.20 + 1094 288 64 29.98 -90.25 + 1095 286 64 30.03 -91.88 + 1096 288 64 30.05 -90.03 + 1097 287 63 30.53 -91.15 + 1098 289 65 29.33 -89.40 + 1099 289 64 30.33 -89.82 + 1100 290 61 32.33 -88.75 + 1101 289 62 31.47 -89.33 + 1102 288 61 32.32 -90.08 + 1103 287 60 33.48 -90.98 + 1104 288 63 31.18 -90.47 + 1105 288 60 33.50 -90.08 + 1106 285 63 31.05 -93.20 + 1107 285 64 30.12 -93.22 + 1108 286 64 30.20 -91.98 + 1109 284 64 29.95 -94.02 + 1110 283 65 29.30 -94.80 + 1111 283 64 29.97 -95.35 + 1112 283 64 29.65 -95.28 + 1113 282 63 30.58 -96.37 + 1114 283 63 31.23 -94.75 + 1115 283 61 32.34 -95.40 + 1116 284 61 32.34 -94.65 + 1117 284 61 32.47 -93.82 + 1118 286 61 32.52 -92.03 + 1119 286 62 31.40 -92.30 + 1120 281 61 32.83 -97.30 + 1121 281 68 25.90 -97.43 + 1122 280 68 26.23 -97.65 + 1123 280 68 26.18 -98.23 + 1124 280 66 27.77 -97.50 + 1125 280 66 27.73 -98.03 + 1126 278 67 27.55 -99.47 + 1127 283 65 29.12 -95.47 + 1128 279 64 29.53 -98.47 + 1129 280 64 30.30 -97.70 + 1130 281 65 28.85 -96.92 + 1131 281 62 31.62 -97.22 + 1132 280 63 31.07 -97.83 + 1133 281 61 32.84 -96.85 + 1134 283 60 33.63 -95.45 + 1135 281 61 32.90 -97.03 + 1136 258 58 35.07 -118.15 + 1137 281 61 32.82 -97.37 + 1138 280 62 32.22 -98.18 + 1139 277 65 29.37 -100.92 + 1140 273 62 31.83 -104.80 + 1141 277 62 31.37 -100.50 + 1142 275 57 36.02 -102.55 + 1143 274 64 30.37 -104.02 + 1144 276 62 31.95 -102.18 + 
1145 274 62 31.78 -103.20 + 1146 278 61 32.41 -99.68 + 1147 276 60 33.65 -101.82 + 1148 273 60 33.30 -104.53 + 1149 273 61 32.33 -104.27 + 1150 274 61 32.68 -103.22 + 1151 271 62 32.24 -106.22 + 1152 270 61 32.28 -106.92 + 1153 271 62 31.80 -106.40 + 1154 270 60 33.23 -107.27 + 1155 270 61 32.27 -107.72 + 1156 267 62 31.57 -110.33 + 1157 268 62 31.47 -109.60 + 1158 266 62 32.12 -110.93 + 1159 265 60 33.43 -112.02 + 1160 262 61 32.65 -114.60 + 1161 259 60 34.05 -117.60 + 1162 260 60 33.83 -116.50 + 1163 258 59 34.20 -118.35 + 1164 258 59 34.22 -118.48 + 1165 256 58 35.23 -120.63 + 1166 260 61 32.73 -117.17 + 1167 260 61 32.57 -116.98 + 1168 257 60 33.25 -119.45 + 1169 258 60 33.40 -118.42 + 1170 259 61 33.13 -117.28 + 1171 260 61 32.85 -117.12 + 1172 258 60 33.93 -118.40 + 1173 258 60 33.82 -118.15 + 1174 259 60 33.68 -117.87 + 1175 301 59 34.27 -77.90 + 1176 300 58 35.17 -79.02 + 1177 300 59 34.98 -78.87 + 1178 304 58 35.27 -75.55 + 1179 303 59 34.78 -76.87 + 1180 300 58 35.87 -78.78 + 1181 302 58 35.64 -77.39 + 1182 301 58 35.33 -77.97 + 1183 301 58 35.84 -77.90 + 1184 302 59 34.82 -77.61 + 1185 308 57 36.82 -72.10 + 1186 303 57 36.90 -76.19 + 1187 303 56 37.13 -76.50 + 1188 302 59 34.90 -76.88 + 1189 302 58 35.07 -77.05 + 1190 298 60 33.95 -81.12 + 1191 299 59 34.18 -79.72 + 1192 296 60 33.95 -83.32 + 1193 297 59 34.84 -82.35 + 1194 297 59 34.90 -82.22 + 1195 296 59 34.50 -82.72 + 1196 298 58 35.22 -80.93 + 1197 298 58 35.73 -81.37 + 1198 296 58 35.43 -82.55 + 1199 299 57 36.08 -79.94 + 1200 299 56 37.21 -80.41 + 1201 297 57 36.48 -82.40 + 1202 299 57 36.13 -80.22 + 1203 294 59 34.35 -85.16 + 1204 292 59 34.65 -86.77 + 1205 291 59 34.75 -87.62 + 1206 294 59 35.03 -85.20 + 1207 295 58 35.82 -83.98 + 1208 294 58 35.95 -85.08 + 1209 292 57 36.13 -86.68 + 1210 290 59 34.27 -88.77 + 1211 288 59 35.05 -90.00 + 1212 290 58 35.59 -88.92 + 1213 286 59 34.83 -92.25 + 1214 286 59 34.73 -92.23 + 1215 288 58 35.83 -90.65 + 1216 285 59 34.48 -93.10 + 1217 286 59 34.18 -91.93 + 
1218 284 60 33.45 -93.98 + 1219 286 60 33.22 -92.80 + 1220 284 58 35.33 -94.37 + 1221 284 57 36.00 -94.17 + 1222 285 57 36.27 -93.15 + 1223 286 57 36.20 -92.47 + 1224 287 58 35.73 -91.65 + 1225 286 57 36.88 -91.90 + 1226 289 56 37.23 -89.57 + 1227 284 57 36.91 -94.02 + 1228 284 56 37.15 -94.50 + 1229 279 60 33.98 -98.50 + 1230 279 59 34.98 -99.05 + 1231 279 58 35.33 -99.20 + 1232 278 57 36.30 -99.77 + 1233 280 58 35.40 -97.60 + 1234 281 57 36.73 -97.10 + 1235 280 59 34.60 -98.40 + 1236 281 59 34.30 -97.02 + 1237 282 57 36.20 -95.90 + 1238 282 57 36.76 -96.01 + 1239 282 59 34.88 -95.78 + 1240 281 58 35.23 -97.47 + 1241 274 57 36.45 -103.15 + 1242 278 59 34.43 -100.28 + 1243 268 58 35.52 -108.78 + 1244 276 58 35.23 -101.70 + 1245 271 62 31.87 -106.70 + 1246 271 59 35.05 -106.62 + 1247 271 58 35.62 -106.08 + 1248 269 57 36.75 -108.23 + 1249 274 58 35.18 -103.60 + 1250 272 58 35.65 -105.15 + 1251 263 58 35.27 -113.95 + 1252 266 57 36.93 -111.45 + 1253 265 59 34.53 -112.47 + 1254 265 59 34.65 -112.42 + 1255 266 59 35.02 -110.73 + 1256 267 59 34.27 -110.00 + 1257 268 59 34.51 -109.38 + 1258 265 58 35.13 -111.67 + 1259 265 58 35.23 -111.82 + 1260 265 58 35.95 -112.15 + 1261 262 59 34.77 -114.62 + 1262 259 59 34.92 -117.90 + 1263 260 59 34.84 -116.78 + 1264 258 59 34.73 -118.22 + 1265 259 59 34.63 -118.08 + 1266 258 58 35.43 -119.05 + 1267 262 57 36.08 -115.17 + 1268 261 57 36.62 -116.02 + 1269 257 57 36.77 -119.72 + 1270 257 60 34.12 -119.12 + 1271 257 59 34.43 -119.83 + 1272 257 59 34.21 -119.20 + 1273 256 59 34.75 -120.57 + 1274 256 59 34.90 -120.45 + 1275 256 58 35.66 -120.63 + 1276 302 56 37.50 -77.33 + 1277 301 55 38.13 -78.44 + 1278 301 56 37.35 -78.43 + 1279 304 55 37.93 -75.48 + 1280 302 54 38.95 -77.44 + 1281 302 55 38.27 -77.45 + 1282 303 55 38.28 -76.40 + 1283 304 55 38.33 -75.51 + 1284 302 54 38.84 -77.03 + 1285 303 54 39.18 -76.67 + 1286 302 54 39.70 -77.73 + 1287 303 54 39.33 -76.42 + 1288 305 54 39.45 -74.57 + 1289 304 54 39.37 -75.07 + 1290 304 53 39.88 
-75.25 + 1291 304 53 40.08 -75.01 + 1292 304 54 39.68 -75.60 + 1293 305 53 40.28 -74.82 + 1294 305 53 40.02 -74.60 + 1295 305 52 40.80 -74.42 + 1296 300 56 37.33 -79.19 + 1297 300 57 36.57 -79.33 + 1298 299 56 37.32 -79.97 + 1299 299 55 37.95 -79.83 + 1300 298 56 37.78 -81.12 + 1301 298 56 37.30 -81.19 + 1302 299 55 37.87 -80.40 + 1303 297 55 38.37 -81.60 + 1304 299 54 38.88 -79.85 + 1305 299 54 39.30 -80.23 + 1306 299 54 39.65 -79.92 + 1307 301 54 39.40 -77.98 + 1308 294 54 39.05 -84.67 + 1309 294 55 38.03 -84.60 + 1310 293 55 38.18 -85.73 + 1311 296 56 37.59 -83.32 + 1312 292 56 37.75 -87.16 + 1313 293 55 37.91 -85.97 + 1314 295 56 37.08 -84.08 + 1315 296 55 38.37 -82.55 + 1316 295 54 39.42 -83.83 + 1317 292 55 38.25 -86.95 + 1318 298 54 39.34 -81.43 + 1319 298 53 40.18 -80.65 + 1320 296 53 40.00 -82.88 + 1321 296 53 39.82 -82.93 + 1322 297 53 39.95 -81.90 + 1323 295 53 39.90 -84.20 + 1324 294 54 39.09 -84.42 + 1325 291 55 38.05 -87.53 + 1326 290 55 38.65 -88.97 + 1327 289 56 37.78 -89.25 + 1328 288 55 38.75 -90.37 + 1329 288 55 38.66 -90.65 + 1330 290 56 37.07 -88.77 + 1331 294 56 37.05 -84.61 + 1332 291 54 39.45 -87.32 + 1333 292 54 39.15 -86.62 + 1334 292 54 39.73 -86.27 + 1335 292 53 40.41 -86.93 + 1336 289 53 39.84 -89.67 + 1337 287 53 39.95 -91.20 + 1338 290 53 40.48 -88.92 + 1339 291 53 40.12 -87.60 + 1340 285 56 37.23 -93.38 + 1341 286 54 38.82 -92.22 + 1342 288 56 37.77 -90.43 + 1343 287 55 38.13 -91.77 + 1344 286 55 38.10 -92.55 + 1345 283 54 39.32 -94.72 + 1346 284 54 39.12 -94.60 + 1347 283 54 38.83 -94.89 + 1348 283 53 39.77 -94.92 + 1349 281 56 37.65 -97.43 + 1350 280 55 38.07 -97.87 + 1351 283 56 37.66 -95.48 + 1352 281 55 38.06 -97.28 + 1353 278 56 37.77 -99.97 + 1354 277 55 37.93 -100.72 + 1355 277 56 37.04 -100.97 + 1356 279 55 38.34 -98.86 + 1357 279 54 38.85 -99.27 + 1358 279 56 37.27 -98.55 + 1359 281 54 39.13 -96.67 + 1360 282 55 38.33 -96.19 + 1361 283 54 39.07 -95.62 + 1362 282 54 38.95 -95.67 + 1363 280 54 39.55 -97.65 + 1364 279 54 38.87 
-98.82 + 1365 280 55 38.80 -97.65 + 1366 276 56 37.01 -101.88 + 1367 272 56 37.45 -105.87 + 1368 270 56 37.15 -107.75 + 1369 269 55 37.95 -107.90 + 1370 274 55 38.05 -103.52 + 1371 275 55 38.07 -102.68 + 1372 273 55 38.28 -104.52 + 1373 276 54 39.37 -101.70 + 1374 278 54 39.38 -99.83 + 1375 273 54 38.82 -104.72 + 1376 273 54 39.57 -104.85 + 1377 270 54 39.65 -106.92 + 1378 271 54 39.22 -106.87 + 1379 270 55 38.53 -106.93 + 1380 273 55 38.70 -104.77 + 1381 273 53 39.75 -104.87 + 1382 274 53 40.17 -103.22 + 1383 272 53 39.91 -105.12 + 1384 266 54 39.62 -110.75 + 1385 268 56 37.62 -109.47 + 1386 266 55 38.37 -110.72 + 1387 263 56 37.04 -113.50 + 1388 264 56 37.70 -113.10 + 1389 265 56 37.70 -112.15 + 1390 269 54 39.12 -108.53 + 1391 269 55 38.50 -107.90 + 1392 269 56 37.30 -108.67 + 1393 273 53 40.43 -104.63 + 1394 272 53 40.45 -105.01 + 1395 267 54 39.00 -110.17 + 1396 267 55 38.76 -109.75 + 1397 264 54 39.33 -112.58 + 1398 265 51 41.78 -111.85 + 1399 258 56 37.37 -118.37 + 1400 256 56 37.28 -120.52 + 1401 255 55 38.52 -121.50 + 1402 255 55 38.55 -121.30 + 1403 255 55 38.70 -121.58 + 1404 260 55 38.05 -117.08 + 1405 258 55 38.55 -118.63 + 1406 262 54 39.28 -114.85 + 1407 262 56 37.62 -114.52 + 1408 257 54 39.50 -119.78 + 1409 257 54 39.57 -119.79 + 1410 255 57 36.58 -121.85 + 1411 255 57 36.66 -121.60 + 1412 255 55 37.90 -121.25 + 1413 255 56 37.70 -121.82 + 1414 254 56 37.73 -122.22 + 1415 254 56 37.62 -122.38 + 1416 254 56 37.37 -121.93 + 1417 253 55 38.52 -122.82 + 1418 307 52 40.87 -72.86 + 1419 305 52 40.70 -74.17 + 1420 305 52 40.84 -74.07 + 1421 306 52 40.77 -73.90 + 1422 307 52 40.80 -73.10 + 1423 306 51 41.63 -73.87 + 1424 306 52 41.07 -73.69 + 1425 305 52 41.50 -74.10 + 1426 307 52 41.17 -73.12 + 1427 307 52 41.27 -72.87 + 1428 308 52 41.33 -72.05 + 1429 308 52 41.17 -71.58 + 1430 309 51 41.65 -70.52 + 1431 310 52 41.25 -70.07 + 1432 309 51 41.92 -70.73 + 1433 309 51 41.68 -70.97 + 1434 310 51 41.67 -70.28 + 1435 308 51 41.73 -71.43 + 1436 308 52 41.60 
-71.42 + 1437 307 51 41.93 -72.68 + 1438 308 51 41.73 -72.18 + 1439 307 50 42.57 -72.27 + 1440 307 51 41.73 -72.65 + 1441 309 50 42.58 -70.92 + 1442 309 51 42.37 -71.03 + 1443 308 51 42.27 -71.87 + 1444 303 53 40.38 -75.97 + 1445 303 53 40.20 -76.76 + 1446 303 53 40.12 -76.29 + 1447 300 52 41.18 -78.90 + 1448 301 53 40.30 -78.32 + 1449 300 53 40.32 -78.83 + 1450 301 52 40.84 -77.85 + 1451 304 52 41.33 -75.73 + 1452 302 52 41.25 -76.92 + 1453 305 51 41.70 -74.80 + 1454 303 51 42.22 -75.98 + 1455 303 51 42.48 -76.44 + 1456 302 51 42.17 -76.90 + 1457 307 49 43.53 -72.95 + 1458 304 53 40.65 -75.43 + 1459 306 50 42.75 -73.80 + 1460 306 50 43.33 -73.62 + 1461 304 48 44.68 -75.47 + 1462 303 50 43.12 -76.12 + 1463 304 50 43.15 -75.37 + 1464 299 53 40.50 -80.22 + 1465 299 52 40.77 -80.40 + 1466 299 53 40.34 -79.93 + 1467 300 53 40.28 -79.40 + 1468 298 52 40.91 -81.43 + 1469 300 51 42.15 -79.26 + 1470 297 52 41.42 -81.87 + 1471 296 52 40.82 -82.52 + 1472 298 52 41.27 -80.67 + 1473 299 51 42.08 -80.18 + 1474 301 51 41.80 -78.62 + 1475 299 52 41.38 -79.87 + 1476 301 50 42.93 -78.73 + 1477 300 50 43.10 -78.94 + 1478 302 50 43.12 -77.67 + 1479 291 51 41.98 -87.90 + 1480 290 51 41.92 -88.25 + 1481 290 53 40.03 -88.28 + 1482 290 53 39.83 -88.87 + 1483 289 53 40.66 -89.68 + 1484 289 51 41.74 -89.68 + 1485 294 52 41.00 -85.20 + 1486 293 53 40.25 -85.40 + 1487 291 51 41.62 -87.42 + 1488 291 51 41.78 -87.75 + 1489 291 51 41.87 -87.60 + 1490 291 51 42.42 -87.87 + 1491 292 51 41.70 -86.32 + 1492 295 52 41.60 -83.80 + 1493 295 52 41.02 -83.67 + 1494 296 51 42.23 -83.33 + 1495 296 51 42.42 -83.02 + 1496 296 50 42.92 -82.53 + 1497 291 45 47.45 -87.90 + 1498 294 50 42.77 -84.60 + 1499 294 51 42.27 -84.47 + 1500 294 51 42.30 -85.25 + 1501 289 51 42.20 -89.10 + 1502 288 52 41.45 -90.52 + 1503 287 51 41.88 -91.70 + 1504 287 52 40.78 -91.13 + 1505 285 52 41.53 -93.65 + 1506 286 52 41.10 -92.45 + 1507 283 52 40.75 -95.41 + 1508 288 51 42.40 -90.70 + 1509 287 50 43.28 -91.74 + 1510 286 50 42.55 
-92.40 + 1511 285 50 43.15 -93.33 + 1512 284 50 42.55 -94.20 + 1513 283 50 42.60 -95.23 + 1514 282 52 41.30 -95.90 + 1515 281 52 40.84 -96.75 + 1516 281 53 40.30 -96.75 + 1517 280 52 40.97 -98.32 + 1518 279 51 41.62 -98.95 + 1519 279 52 40.73 -99.00 + 1520 282 51 41.76 -96.18 + 1521 283 53 40.08 -95.60 + 1522 278 52 41.44 -99.64 + 1523 281 51 41.98 -97.43 + 1524 281 52 41.45 -97.34 + 1525 279 51 42.47 -98.69 + 1526 282 51 42.40 -96.38 + 1527 282 52 41.32 -96.37 + 1528 275 52 41.10 -102.98 + 1529 277 52 41.13 -100.68 + 1530 276 53 40.51 -101.62 + 1531 279 53 40.45 -99.33 + 1532 275 51 42.05 -102.80 + 1533 275 50 42.83 -103.10 + 1534 273 52 41.15 -104.82 + 1535 272 52 41.32 -105.67 + 1536 273 53 39.87 -104.67 + 1537 274 51 41.87 -103.60 + 1538 271 50 42.92 -106.47 + 1539 268 53 40.43 -109.52 + 1540 270 53 40.48 -107.22 + 1541 270 54 39.53 -107.73 + 1542 265 52 40.78 -111.97 + 1543 265 53 40.22 -111.72 + 1544 268 52 41.60 -109.07 + 1545 270 51 41.80 -107.20 + 1546 265 52 41.20 -112.02 + 1547 269 50 42.82 -108.73 + 1548 269 50 43.07 -108.47 + 1549 266 52 41.28 -111.03 + 1550 266 49 43.60 -110.73 + 1551 264 50 42.92 -112.60 + 1552 265 49 43.52 -112.07 + 1553 261 48 44.88 -116.10 + 1554 258 53 40.07 -118.57 + 1555 263 52 40.73 -114.03 + 1556 261 52 40.87 -115.73 + 1557 261 52 40.83 -115.78 + 1558 261 51 41.67 -115.78 + 1559 259 52 40.90 -117.80 + 1560 256 53 40.38 -120.57 + 1561 256 54 39.28 -120.70 + 1562 256 54 39.32 -120.13 + 1563 256 54 38.90 -120.00 + 1564 263 49 43.50 -114.30 + 1565 262 51 42.48 -114.48 + 1566 263 50 42.55 -113.77 + 1567 255 51 42.15 -121.73 + 1568 253 54 39.13 -123.20 + 1569 254 53 40.15 -122.25 + 1570 254 53 40.50 -122.30 + 1571 252 52 40.98 -124.10 + 1572 252 51 41.78 -124.23 + 1573 256 52 41.50 -120.53 + 1574 253 51 42.37 -122.87 + 1575 308 50 43.20 -71.50 + 1576 309 50 43.08 -70.82 + 1577 309 49 43.65 -70.32 + 1578 309 50 43.40 -70.72 + 1579 312 48 44.45 -68.37 + 1580 311 49 44.07 -69.10 + 1581 313 48 44.92 -67.00 + 1582 312 45 47.28 -68.32 + 
1583 311 48 44.80 -68.83 + 1584 307 50 43.35 -72.52 + 1585 307 49 43.63 -72.30 + 1586 308 49 44.36 -71.55 + 1587 308 48 44.42 -72.02 + 1588 307 49 44.20 -72.57 + 1589 308 49 43.57 -71.42 + 1590 309 48 44.58 -71.18 + 1591 307 50 42.90 -72.27 + 1592 306 48 44.47 -73.15 + 1593 309 48 44.53 -70.53 + 1594 310 49 44.05 -70.28 + 1595 310 49 44.32 -69.80 + 1596 310 47 45.47 -69.58 + 1597 311 47 45.65 -68.68 + 1598 306 48 44.65 -73.47 + 1599 305 48 44.93 -74.85 + 1600 303 49 44.00 -76.01 + 1601 305 49 44.38 -74.19 + 1602 295 50 42.70 -83.47 + 1603 294 48 44.90 -84.72 + 1604 293 50 42.88 -85.52 + 1605 292 51 42.14 -86.44 + 1606 293 51 42.23 -85.55 + 1607 292 50 43.17 -86.25 + 1608 295 50 42.97 -83.75 + 1609 296 50 42.67 -83.42 + 1610 295 49 43.53 -84.08 + 1611 294 49 44.36 -84.67 + 1612 293 49 44.28 -85.42 + 1613 292 49 44.28 -86.25 + 1614 293 48 44.73 -85.58 + 1615 295 48 45.07 -83.57 + 1616 296 48 44.45 -83.40 + 1617 291 50 42.95 -87.90 + 1618 289 50 43.13 -89.33 + 1619 290 50 42.62 -89.04 + 1620 288 50 43.21 -90.18 + 1621 287 49 43.87 -91.25 + 1622 287 48 44.87 -91.48 + 1623 286 49 43.92 -92.50 + 1624 290 48 44.48 -88.13 + 1625 291 49 44.13 -87.68 + 1626 290 49 43.98 -88.55 + 1627 289 48 44.93 -89.63 + 1628 289 48 44.78 -89.67 + 1629 292 47 45.73 -87.08 + 1630 291 48 45.12 -87.63 + 1631 285 48 44.85 -93.57 + 1632 281 49 43.58 -96.73 + 1633 281 49 44.31 -96.82 + 1634 281 50 42.92 -97.38 + 1635 279 49 43.80 -99.32 + 1636 280 49 44.38 -98.22 + 1637 280 49 43.77 -98.03 + 1638 281 48 44.92 -97.15 + 1639 284 47 45.55 -94.07 + 1640 284 46 46.40 -94.13 + 1641 283 48 44.55 -95.08 + 1642 283 47 45.87 -95.40 + 1643 282 48 44.45 -95.82 + 1644 284 49 44.32 -94.50 + 1645 284 47 45.95 -94.35 + 1646 285 48 44.88 -93.22 + 1647 285 48 44.95 -93.07 + 1648 284 49 44.22 -93.91 + 1649 284 49 43.65 -94.42 + 1650 285 49 43.68 -93.37 + 1651 280 47 45.45 -98.43 + 1652 275 49 44.06 -103.05 + 1653 272 49 44.35 -105.53 + 1654 270 48 44.77 -106.97 + 1655 267 48 44.54 -110.42 + 1656 269 49 43.97 
-107.95 + 1657 272 47 45.45 -105.40 + 1658 273 46 47.13 -104.80 + 1659 277 47 45.55 -100.41 + 1660 278 49 44.38 -100.28 + 1661 276 47 45.93 -102.17 + 1662 268 48 44.52 -109.02 + 1663 267 50 42.58 -110.11 + 1664 266 48 44.68 -111.12 + 1665 269 47 45.80 -108.53 + 1666 268 46 47.05 -109.47 + 1667 264 47 45.95 -112.50 + 1668 264 48 45.25 -112.55 + 1669 266 47 45.78 -111.15 + 1670 267 47 45.70 -110.45 + 1671 261 49 43.57 -116.22 + 1672 258 49 43.58 -118.95 + 1673 255 49 44.25 -121.15 + 1674 263 48 45.12 -113.88 + 1675 261 47 45.95 -116.13 + 1676 258 47 45.68 -118.85 + 1677 259 48 44.83 -117.82 + 1678 253 50 43.23 -123.35 + 1679 252 50 43.42 -124.25 + 1680 253 49 44.12 -123.22 + 1681 253 48 44.92 -123.00 + 1682 252 48 44.58 -124.06 + 1683 254 47 45.60 -122.60 + 1684 254 47 45.55 -122.40 + 1685 253 47 45.53 -122.95 + 1686 255 47 45.62 -121.17 + 1687 312 47 46.12 -67.80 + 1688 310 46 46.62 -69.53 + 1689 312 46 46.87 -68.01 + 1690 312 46 46.68 -68.05 + 1691 295 46 46.47 -84.37 + 1692 294 47 46.25 -84.47 + 1693 294 47 45.57 -84.80 + 1694 289 47 45.63 -89.47 + 1695 291 46 46.53 -87.55 + 1696 291 46 46.35 -87.40 + 1697 291 47 45.82 -88.12 + 1698 290 46 47.17 -88.50 + 1699 288 46 46.53 -90.13 + 1700 286 46 46.83 -92.18 + 1701 285 45 47.38 -92.83 + 1702 282 46 46.83 -95.89 + 1703 287 45 47.82 -91.83 + 1704 285 44 48.57 -93.38 + 1705 284 44 48.73 -94.62 + 1706 281 46 46.90 -96.80 + 1707 279 46 46.93 -98.68 + 1708 283 45 47.50 -94.93 + 1709 283 44 48.93 -95.33 + 1710 281 45 47.95 -97.18 + 1711 279 45 48.10 -98.87 + 1712 277 46 46.77 -100.75 + 1713 275 46 46.80 -102.80 + 1714 274 44 48.18 -103.63 + 1715 276 44 48.27 -101.28 + 1716 276 45 47.65 -101.43 + 1717 271 44 48.22 -106.62 + 1718 270 45 47.33 -106.93 + 1719 272 45 48.10 -105.58 + 1720 273 45 47.70 -104.20 + 1721 265 46 46.60 -112.00 + 1722 263 46 46.92 -114.08 + 1723 266 45 47.48 -111.37 + 1724 267 44 48.55 -109.77 + 1725 263 44 48.30 -114.27 + 1726 265 44 48.60 -112.37 + 1727 256 46 46.57 -120.53 + 1728 255 45 47.28 -121.33 
+ 1729 256 45 47.40 -120.02 + 1730 256 45 47.40 -120.20 + 1731 257 45 47.30 -119.52 + 1732 257 46 47.20 -119.32 + 1733 260 46 46.38 -117.02 + 1734 260 45 47.77 -116.82 + 1735 257 46 46.32 -119.27 + 1736 257 47 46.27 -119.12 + 1737 258 47 46.10 -118.28 + 1738 259 45 47.63 -117.53 + 1739 259 45 47.68 -117.32 + 1740 260 46 46.75 -117.12 + 1741 259 45 47.70 -117.60 + 1742 259 44 48.55 -117.88 + 1743 256 46 47.03 -120.53 + 1744 253 45 48.12 -123.50 + 1745 257 44 48.42 -119.53 + 1746 252 47 46.15 -123.88 + 1747 253 46 46.97 -122.90 + 1748 252 46 46.97 -123.93 + 1749 253 47 46.12 -122.94 + 1750 254 45 47.45 -122.30 + 1751 254 45 47.50 -122.22 + 1752 254 45 47.53 -122.30 + 1753 254 45 47.90 -122.28 + 1754 254 45 47.27 -122.58 + 1755 298 66 27.65 -81.33 + 1756 252 45 47.95 -124.55 + 1757 254 44 48.80 -122.53 + 1758 264 53 40.17 -112.93 + 1759 272 46 46.43 -105.87 + 1760 283 48 44.67 -95.45 + 1761 288 50 43.22 -90.53 + 1762 304 49 44.05 -75.73 + 1763 310 49 43.90 -70.25 + 1764 308 50 42.93 -71.43 + 1765 271 51 41.90 -106.19 + 1766 277 53 40.09 -100.65 + 1767 276 50 42.91 -101.69 + 1768 281 53 40.10 -97.34 + 1769 280 51 42.21 -97.79 + 1770 285 51 41.90 -93.70 + 1771 288 52 41.61 -90.57 + 1772 295 53 39.82 -84.03 + 1773 290 51 41.77 -88.48 + 1774 292 52 40.81 -87.05 + 1775 293 54 38.83 -85.42 + 1776 306 53 40.65 -73.78 + 1777 308 51 42.47 -71.28 + 1778 309 50 42.72 -71.12 + 1779 307 51 42.20 -72.53 + 1780 307 51 42.15 -72.72 + 1781 310 51 41.67 -69.97 + 1782 306 50 42.85 -73.93 + 1783 276 56 37.77 -102.18 + 1784 273 54 38.97 -104.82 + 1785 274 54 39.26 -103.70 + 1786 273 53 40.18 -104.72 + 1787 279 56 37.65 -99.09 + 1788 282 56 37.38 -95.63 + 1789 281 55 38.31 -97.30 + 1790 286 56 37.52 -92.70 + 1791 284 54 39.58 -94.19 + 1792 288 54 39.66 -90.48 + 1793 289 53 40.15 -89.33 + 1794 305 54 39.02 -74.92 + 1795 259 58 35.68 -117.68 + 1796 271 61 32.41 -106.35 + 1797 269 57 36.84 -107.91 + 1798 279 57 36.07 -99.22 + 1799 278 57 36.43 -99.53 + 1800 281 57 36.69 -97.48 + 1801 282 58 
35.68 -95.86 + 1802 280 59 34.98 -97.52 + 1803 289 57 36.88 -89.97 + 1804 292 56 36.97 -86.42 + 1805 303 57 36.27 -76.18 + 1806 261 61 32.83 -115.58 + 1807 261 60 33.63 -116.17 + 1808 262 60 33.62 -114.72 + 1809 274 58 35.08 -103.61 + 1810 271 61 33.08 -106.12 + 1811 271 61 32.90 -106.40 + 1812 277 61 33.02 -100.98 + 1813 278 63 30.50 -99.77 + 1814 282 62 31.78 -95.71 + 1815 284 60 34.11 -94.29 + 1816 286 62 31.90 -92.78 + 1817 289 63 30.40 -89.07 + 1818 290 60 34.09 -88.86 + 1819 297 62 31.90 -81.63 + 1820 296 63 30.89 -83.01 + 1821 299 60 33.97 -80.47 + 1822 300 60 33.68 -78.93 + 1823 301 60 33.82 -78.72 + 1824 299 66 28.47 -80.55 + 1825 260 61 32.55 -116.97 + 1826 262 61 32.63 -115.24 + 1827 271 62 31.63 -106.43 + 1828 258 65 28.88 -118.30 + 1829 266 65 29.07 -110.97 + 1830 271 65 28.70 -105.97 + 1831 265 67 27.32 -112.30 + 1832 266 66 27.97 -110.93 + 1833 266 66 27.95 -110.80 + 1834 278 67 27.43 -99.57 + 1835 280 68 26.02 -98.23 + 1836 274 69 25.53 -103.45 + 1837 278 68 25.87 -100.20 + 1838 278 68 25.78 -100.10 + 1839 280 68 25.77 -97.53 + 1840 267 70 24.17 -110.42 + 1841 267 70 24.07 -110.37 + 1842 267 71 23.15 -109.70 + 1843 270 69 24.82 -107.40 + 1844 273 70 24.13 -104.53 + 1845 271 71 23.20 -106.42 + 1846 271 71 23.17 -106.27 + 1847 279 71 23.73 -99.13 + 1848 279 71 23.72 -98.97 + 1849 275 72 22.90 -102.68 + 1850 277 72 22.15 -100.98 + 1851 280 72 22.28 -97.87 + 1852 275 73 21.88 -102.30 + 1853 292 74 21.03 -86.87 + 1854 272 74 20.68 -105.25 + 1855 274 74 20.52 -103.32 + 1856 289 74 20.98 -89.65 + 1857 292 74 20.53 -86.93 + 1858 273 76 19.15 -104.57 + 1859 277 75 19.85 -101.03 + 1860 278 75 19.35 -99.57 + 1861 279 75 19.43 -99.10 + 1862 282 76 19.15 -96.18 + 1863 284 77 18.10 -94.58 + 1864 287 76 18.65 -91.80 + 1865 276 77 17.60 -101.47 + 1866 278 78 16.83 -99.92 + 1867 278 78 16.77 -99.75 + 1868 282 79 15.78 -96.27 + 1869 286 80 14.78 -92.38 + 1870 316 61 32.37 -64.68 + 1871 300 67 26.70 -78.97 + 1872 301 68 26.55 -78.69 + 1873 300 68 25.73 -79.30 + 1874 
302 69 25.05 -77.47 + 1875 304 71 23.50 -75.76 + 1876 296 73 21.83 -82.78 + 1877 297 71 22.98 -82.40 + 1878 298 71 23.13 -81.28 + 1879 301 73 21.42 -77.85 + 1880 302 74 20.33 -77.12 + 1881 303 74 20.40 -76.62 + 1882 304 75 19.96 -75.85 + 1883 304 75 20.08 -75.15 + 1884 305 74 20.35 -74.50 + 1885 305 74 20.65 -74.92 + 1886 298 73 21.62 -81.55 + 1887 300 73 21.78 -78.78 + 1888 302 74 20.95 -76.94 + 1889 304 75 19.90 -75.12 + 1890 298 75 19.28 -81.35 + 1891 301 76 18.50 -77.92 + 1892 303 77 17.93 -76.78 + 1893 308 75 19.75 -72.18 + 1894 307 76 18.57 -72.30 + 1895 309 75 19.75 -70.55 + 1896 309 75 19.46 -70.69 + 1897 312 76 18.57 -68.37 + 1898 310 76 18.43 -69.67 + 1899 310 76 18.47 -69.88 + 1900 313 76 18.50 -67.12 + 1901 313 76 18.27 -67.15 + 1902 313 77 18.02 -66.57 + 1903 314 76 18.43 -66.00 + 1904 315 76 18.33 -64.97 + 1905 315 77 17.70 -64.80 + 1906 316 76 18.45 -64.53 + 1907 290 77 17.53 -88.30 + 1908 289 78 16.92 -89.88 + 1909 287 80 15.32 -91.47 + 1910 290 79 15.72 -88.60 + 1911 288 80 14.58 -90.52 + 1912 288 81 13.92 -90.82 + 1913 289 81 13.57 -89.83 + 1914 289 81 13.70 -89.12 + 1915 290 82 13.43 -89.05 + 1916 291 82 13.28 -87.67 + 1917 293 78 16.46 -85.92 + 1918 292 79 16.32 -86.53 + 1919 292 79 15.73 -86.87 + 1920 291 79 15.72 -87.48 + 1921 292 80 15.17 -87.12 + 1922 291 79 15.45 -87.93 + 1923 295 80 15.22 -83.80 + 1924 293 80 14.90 -85.93 + 1925 290 80 14.78 -88.78 + 1926 290 81 14.33 -88.17 + 1927 291 81 14.05 -87.22 + 1928 292 82 13.30 -87.18 + 1929 296 81 14.05 -83.37 + 1930 293 83 12.15 -86.17 + 1931 294 85 9.97 -84.78 + 1932 295 85 10.00 -84.22 + 1933 295 85 9.95 -84.15 + 1934 296 85 10.00 -83.05 + 1935 293 85 10.60 -85.55 + 1936 296 86 9.43 -82.52 + 1937 300 86 9.05 -79.37 + 1938 297 87 8.39 -82.42 + 1939 297 86 9.35 -82.25 + 1940 298 87 8.08 -80.94 + 1941 300 86 8.97 -79.51 + 1942 317 77 18.20 -63.05 + 1943 318 78 16.75 -62.17 + 1944 318 78 17.29 -62.68 + 1945 318 78 17.20 -62.58 + 1946 319 78 17.12 -61.78 + 1947 317 77 18.04 -63.12 + 1948 317 77 
17.48 -62.98 + 1949 317 77 17.90 -62.85 + 1950 319 79 16.27 -61.52 + 1951 319 79 15.53 -61.30 + 1952 319 79 15.53 -61.40 + 1953 319 80 15.30 -61.40 + 1954 321 82 13.07 -59.48 + 1955 310 83 12.50 -70.01 + 1956 311 83 12.20 -68.97 + 1957 312 83 12.15 -68.28 + 1958 297 83 12.58 -81.72 + 1959 305 84 11.13 -74.23 + 1960 304 85 10.45 -75.52 + 1961 305 84 10.90 -74.77 + 1962 306 88 7.10 -73.20 + 1963 304 89 6.22 -75.60 + 1964 304 89 6.18 -75.43 + 1965 304 91 4.82 -75.80 + 1966 305 91 4.70 -74.13 + 1967 308 85 10.57 -71.73 + 1968 313 85 10.60 -66.98 + 1969 320 99 -2.83 -60.70 + 1970 333 98 -1.43 -48.48 + 1971 321 99 -3.15 -59.98 + 1972 343 100 -3.78 -38.53 + 1973 332 102 -5.53 -49.15 + 1974 339 101 -5.05 -42.82 + 1975 347 102 -5.92 -35.25 + 1976 342 104 -7.88 -40.08 + 1977 316 105 -8.70 -63.90 + 1978 332 105 -8.27 -49.28 + 1979 337 106 -9.07 -44.37 + 1980 333 107 -10.70 -48.40 + 1981 343 110 -13.00 -38.52 + 1982 325 113 -15.65 -56.10 + 1983 334 121 -23.00 -47.13 + 1984 332 120 -22.32 -49.07 + 1985 338 120 -22.90 -43.17 + 1986 335 121 -23.62 -46.65 + 1987 330 128 -30.08 -51.18 + 1988 289 97 -0.90 -89.62 + 1989 301 96 -0.12 -78.35 + 1990 299 98 -2.15 -79.88 + 1991 300 105 -8.08 -79.12 + 1992 302 109 -12.02 -77.03 + 1993 315 112 -14.75 -64.80 + 1994 312 114 -16.50 -68.17 + 1995 309 132 -33.38 -70.78 + 1996 309 133 -34.97 -71.22 + 1997 323 123 -25.16 -57.38 + 1998 322 125 -27.45 -59.05 + 1999 320 131 -32.92 -60.78 + 2000 322 133 -34.82 -58.53 + 2001 313 145 -45.78 -67.45 + 2002 215 72 21.98 -159.35 + 2003 216 73 21.32 -158.07 + 2004 216 73 21.35 -157.93 + 2005 217 73 21.15 -157.10 + 2006 218 74 20.90 -156.43 + 2007 155 82 13.35 144.80 + 2008 156 80 15.12 145.73 + 2009 178 75 19.28 166.65 + 2010 219 75 19.72 -155.07 + 2011 162 88 7.47 151.85 + 2012 169 89 6.97 158.22 + 2013 174 90 5.33 163.03 + 2014 179 87 8.73 167.73 + 2015 183 88 7.08 171.38 + 2016 144 88 7.33 134.48 + 2017 148 86 9.48 138.08 + 2018 155 81 14.20 145.20 + 2019 156 80 15.00 145.60 + 2020 156 76 18.80 145.70 + 
2021 144 87 8.10 134.70 + 2022 142 90 5.30 132.20 + 2023 147 87 8.30 137.50 + 2024 150 85 10.00 139.80 + 2025 150 86 9.80 140.50 + 2026 144 88 7.40 134.90 + 2027 155 87 8.60 144.60 + 2028 157 88 7.40 147.10 + 2029 160 88 7.40 149.20 + 2030 160 87 8.60 149.70 + 2031 163 87 8.60 151.90 + 2032 163 89 6.90 152.70 + 2033 165 90 5.50 153.80 + 2034 166 92 3.80 155.00 + 2035 168 90 5.80 157.30 + 2036 169 89 7.00 157.90 + 2037 171 89 6.80 159.80 + 2038 172 89 6.20 160.70 + 2039 174 83 11.80 162.50 + 2040 177 86 8.90 165.70 + 2041 181 88 7.30 168.80 + 2042 181 90 5.90 169.60 + 2043 182 84 11.20 169.80 + 2044 182 86 9.50 170.20 + 2045 184 89 6.10 171.80 + 2046 234 115 -18.07 -140.95 + 2047 187 136 -37.02 174.80 + 2048 140 109 -12.42 130.87 + 2049 163 126 -27.63 152.72 + 2050 124 130 -31.92 115.97 + 2051 162 132 -33.95 151.18 + 2052 155 136 -37.67 144.83 + 2053 159 134 -35.40 148.98 + 2054 158 142 -42.83 147.50 + 2055 114 103 -6.15 106.85 + 2056 292 62 31.42 -87.05 + 2057 129 80 15.18 120.57 + 2058 130 80 14.52 121.00 + 2059 131 89 6.90 122.07 + 2060 302 60 33.49 -77.59 + 2061 262 65 29.37 -114.47 + 2062 263 53 40.33 -113.50 + 2063 300 58 35.17 -79.50 + 2064 276 44 48.83 -101.67 + 2065 276 44 44.22 -0.67 + 2066 23 126 -28.00 21.50 + 2067 312 139 -40.50 -68.00 + 2068 321 125 -27.33 -59.50 + 2069 144 126 -28.23 134.98 + 2070 356 55 38.70 -27.10 + 2071 225 31 61.20 -149.80 + 2072 369 104 -7.90 -14.40 + 2073 149 58 35.70 139.30 + 2074 326 44 48.90 -54.50 + 2075 6 49 43.50 4.90 + 2076 375 40 52.70 -8.90 + 2077 142 112 -14.50 132.30 + 2078 376 55 38.10 -7.90 + 2079 129 70 24.20 120.60 + 2080 108 82 12.70 101.00 + 2081 45 74 20.30 41.60 + 2082 202 111 -14.30 -170.70 + 2083 311 151 -51.60 -69.30 + 2084 268 125 -27.20 -109.40 + 2085 95 72 22.60 88.50 + 2086 347 105 -8.10 -34.90 + 2087 335 121 -23.40 -46.50 + 2088 86 88 7.20 79.90 + 2089 313 85 10.60 -67.00 + 2090 329 91 4.80 -52.40 + 2091 78 76 19.10 72.80 + 2092 51 116 -18.80 47.50 + 2093 266 62 32.20 -110.90 + 2094 275 49 44.10 
-103.10 + 2095 228 27 64.60 -147.00 + 2096 256 59 34.80 -120.60 + 2097 259 59 34.60 -118.10 + 2098 263 53 40.19 -113.47 + 2099 74 59 34.95 69.27 + 2100 383 40 52.83 -1.32 + 2101 8 86 9.01 7.26 + 2102 260 71 23.61 -116.48 + 2103 68 47 46.00 63.56 + 2104 73 45 47.67 67.73 + 2105 218 75 19.73 -156.05 + 2106 240 33 59.23 -135.43 + 2107 228 31 61.13 -146.25 + 2108 217 74 20.78 -156.95 + 2109 217 74 21.02 -156.63 + 2110 314 77 17.85 -66.52 + 2111 313 77 18.17 -67.15 + 2112 285 58 35.25 -93.09 + 2113 137 113 -15.51 128.15 + 2114 377 35 57.48 -7.36 + 2115 11 38 54.38 10.13 diff --git a/parm/stage/analysis.yaml.j2 b/parm/stage/analysis.yaml.j2 new file mode 100644 index 0000000000..9a2ec5bbdf --- /dev/null +++ b/parm/stage/analysis.yaml.j2 @@ -0,0 +1,26 @@ +{% if path_exists(ICSDIR ~ "/" ~ COMOUT_ATMOS_ANALYSIS_MEM_list[0] | relpath(ROTDIR)) %} +analysis: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_ANALYSIS_MEM = COMOUT_ATMOS_ANALYSIS_MEM_list[imem] %} + - "{{ COMOUT_ATMOS_ANALYSIS_MEM }}" + {% endfor %} + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_ANALYSIS_MEM = COMOUT_ATMOS_ANALYSIS_MEM_list[imem] %} + {% for ftype in ["abias", "abias_air", "abias_int", "abias_pc", "atminc.nc", "atmi009.nc", "atmi003.nc", "radstat", "ratminc.nc", "ratmi009.nc", "ratmi003.nc"] %} + {% if path_exists(ICSDIR ~ "/" ~ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) ~ "/" ~ RUN ~ ".t" ~ current_cycle_HH ~ "z." ~ ftype) %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ RUN }}.t{{ current_cycle_HH }}z.{{ ftype }}", "{{ COMOUT_ATMOS_ANALYSIS_MEM }}"] + {% endif %} + {% endfor %} + {% if DO_JEDIATMVAR %} + {% for ftype in ["satbias.nc", "satbias_cov.nc", "tlapse.txt"] %} + {% for file in glob(ICSDIR ~ "/" ~ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) ~ "/" ~ RUN ~ ".t" ~ current_cycle_HH ~ "z.atms_*." 
~ ftype) %} + - ["{{ file }}", "{{ COMOUT_ATMOS_ANALYSIS_MEM }}"] + {% endfor %} + {% endfor %} + {% endif %} + {% endfor %} # mem loop +{% endif %} diff --git a/parm/stage/atmosphere_cold.yaml.j2 b/parm/stage/atmosphere_cold.yaml.j2 new file mode 100644 index 0000000000..9eeaaf4b9e --- /dev/null +++ b/parm/stage/atmosphere_cold.yaml.j2 @@ -0,0 +1,18 @@ +atmosphere_cold: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_INPUT_MEM = COMOUT_ATMOS_INPUT_MEM_list[imem] %} + - "{{ COMOUT_ATMOS_INPUT_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_INPUT_MEM = COMOUT_ATMOS_INPUT_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_INPUT_MEM | relpath(ROTDIR) }}/gfs_ctrl.nc", "{{ COMOUT_ATMOS_INPUT_MEM }}"] + {% for ftype in ["gfs_data", "sfc_data"] %} + {% for ntile in range(1, ntiles + 1) %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_INPUT_MEM | relpath(ROTDIR) }}/{{ ftype }}.tile{{ ntile }}.nc", "{{ COMOUT_ATMOS_INPUT_MEM }}"] + {% endfor %} # ntile + {% endfor %} # ftype + {% endfor %} # mem loop diff --git a/parm/stage/atmosphere_nest.yaml.j2 b/parm/stage/atmosphere_nest.yaml.j2 new file mode 100644 index 0000000000..13ec0ed8c5 --- /dev/null +++ b/parm/stage/atmosphere_nest.yaml.j2 @@ -0,0 +1,33 @@ +atmosphere_nest: + {% set ntile = 7 %} + {% if EXP_WARM_START == True %} + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_RESTART_PREV_MEM = COMOUT_ATMOS_RESTART_PREV_MEM_list[imem] %} + - "{{ COMOUT_ATMOS_RESTART_PREV_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_RESTART_PREV_MEM = COMOUT_ATMOS_RESTART_PREV_MEM_list[imem] %} + {% for ftype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %} + - ["{{ ICSDIR 
}}/{{ COMOUT_ATMOS_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.{{ ftype }}.tile{{ ntile }}.nc", "{{ COMOUT_ATMOS_RESTART_PREV_MEM }}/{{ m_prefix }}.{{ ftype }}.nest0{{ ntile-5 }}.tile{{ ntile }}.nc"] + {% endfor %} + {% endfor %} # mem loop + {% else %} # cold start + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_INPUT_MEM = COMOUT_ATMOS_INPUT_MEM_list[imem] %} + - "{{ COMOUT_ATMOS_INPUT_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_INPUT_MEM = COMOUT_ATMOS_INPUT_MEM_list[imem] %} + {% for ftype in ["gfs_data", "sfc_data"] %} + - ["{{ COMOUT_ATMOS_INPUT_MEM }}/{{ ftype }}.tile{{ ntile }}.nc", "{{ COMOUT_ATMOS_INPUT_MEM }}/{{ ftype }}.nest0{{ ntile-5 }}.tile{{ ntile }}.nc"] + {% endfor %} + {% endfor %} # mem loop + {% endif %} diff --git a/parm/stage/atmosphere_perturbation.yaml.j2 b/parm/stage/atmosphere_perturbation.yaml.j2 new file mode 100644 index 0000000000..0e097b71dc --- /dev/null +++ b/parm/stage/atmosphere_perturbation.yaml.j2 @@ -0,0 +1,13 @@ +atmosphere_perturbation: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_ANALYSIS_MEM = COMOUT_ATMOS_ANALYSIS_MEM_list[imem] %} + - "{{ COMOUT_ATMOS_ANALYSIS_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_ANALYSIS_MEM = COMOUT_ATMOS_ANALYSIS_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.fv3_perturbation.nc", "{{ COMOUT_ATMOS_ANALYSIS_MEM }}/{{ RUN }}.t{{ current_cycle_HH }}z.atminc.nc"] + {% endfor %} # mem loop diff --git a/parm/stage/atmosphere_warm.yaml.j2 b/parm/stage/atmosphere_warm.yaml.j2 new file mode 100644 index 0000000000..14c8615262 --- /dev/null +++ b/parm/stage/atmosphere_warm.yaml.j2 @@ -0,0 +1,28 @@ 
+atmosphere_warm: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_RESTART_PREV_MEM = COMOUT_ATMOS_RESTART_PREV_MEM_list[imem] %} + - "{{ COMOUT_ATMOS_RESTART_PREV_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ATMOS_RESTART_PREV_MEM = COMOUT_ATMOS_RESTART_PREV_MEM_list[imem] %} + {% if path_exists(ICSDIR ~ "/" ~ COMOUT_ATMOS_RESTART_PREV_MEM | relpath(ROTDIR) ~ "/" ~ m_prefix ~ ".atm_stoch.res.nc") %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.atm_stoch.res.nc", "{{ COMOUT_ATMOS_RESTART_PREV_MEM }}"] + {% endif %} # path_exists + {% for ftype in ["coupler.res", "fv_core.res.nc"] %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.{{ ftype }}", "{{ COMOUT_ATMOS_RESTART_PREV_MEM }}"] + {% endfor %} + {% for ftype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %} + {% for ntile in range(1, ntiles + 1) %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.{{ ftype }}.tile{{ ntile }}.nc", "{{ COMOUT_ATMOS_RESTART_PREV_MEM }}"] + {% endfor %} # ntile + {% endfor %} # ftype + {% for ntile in range(1, ntiles + 1) %} + {% if path_exists(ICSDIR ~ "/" ~ COMOUT_ATMOS_RESTART_PREV_MEM | relpath(ROTDIR) ~ "/" ~ p_prefix ~ ".sfcanl_data.tile" ~ ntile ~ ".nc") %} + - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ p_prefix }}.sfcanl_data.tile{{ ntile }}.nc", "{{ COMOUT_ATMOS_RESTART_PREV_MEM }}"] + {% endif %} # path_exists + {% endfor %} # ntile + {% endfor %} # mem loop diff --git a/parm/stage/ice.yaml.j2 b/parm/stage/ice.yaml.j2 new file mode 100644 index 0000000000..0e0aa40c7f --- /dev/null +++ b/parm/stage/ice.yaml.j2 @@ -0,0 +1,28 @@ +ice: + {% if DO_JEDIOCNVAR == True %} + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} 
+ {% set imem = mem - first_mem %} + {% set COMOUT_ICE_ANALYSIS_MEM = COMOUT_ICE_ANALYSIS_MEM_list[imem] %} + - "{{ COMOUT_ICE_ANALYSIS_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ICE_ANALYSIS_MEM = COMOUT_ICE_ANALYSIS_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_ICE_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.cice_model_anl.res.nc", "{{ COMOUT_ICE_ANALYSIS_MEM }}"] + {% endfor %} # mem loop + {% else %} + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ICE_RESTART_PREV_MEM = COMOUT_ICE_RESTART_PREV_MEM_list[imem] %} + - "{{ COMOUT_ICE_RESTART_PREV_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_ICE_RESTART_PREV_MEM = COMOUT_ICE_RESTART_PREV_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_ICE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.cice_model.res.nc", "{{ COMOUT_ICE_RESTART_PREV_MEM }}"] + {% endfor %} # mem loop + {% endif %} diff --git a/parm/stage/master_gefs.yaml.j2 b/parm/stage/master_gefs.yaml.j2 new file mode 100644 index 0000000000..bdd4c8de5f --- /dev/null +++ b/parm/stage/master_gefs.yaml.j2 @@ -0,0 +1,154 @@ +################################################################### +# This is the master yaml for the GEFS +# +# Cycle, member, and RUN settings are set before including each +# component yaml based on DO switches +# +# The included yamls are intended to be of the following structure: +# key1: +# mkdir: +# - "COM directory to create" +# copy: +# - ["source_file", "destination_file"] +# key2: +# mkdir: +# - "COM directory to create" +# copy: +# - ["source_file", "destination_file"] +# +# Any number of keys with nested mkdir and copy are permitted +# Jinja is permitted in this yaml, as long as the keys are: +# - COMOUT_ +# - DO_ATM, DO_OCN, DO_ICE, etc. 
+# For a full list see scripts/exglobal_stage_ic.py +################################################################### + +# Set cycle variables +# ------------------------ +{% set half_window = assim_freq // 2 %} +{% set half_window_begin = (-half_window | string + "H") | to_timedelta %} +{% set half_window_end = (half_window | string + "H") | to_timedelta %} +{% if DOIAU and MODE == "cycled" %} + {% set model_start_date_current_cycle = current_cycle | add_to_datetime(half_window_begin) %} +{% else %} + {% if REPLAY_ICS %} + {% set model_start_date_current_cycle = current_cycle | add_to_datetime(half_window_end) %} + {% else %} + {% set model_start_date_current_cycle = current_cycle %} + {% endif %} +{% endif %} + +{% set current_cycle_YMD = current_cycle | to_YMD %} +{% set current_cycle_HH = current_cycle | strftime("%H") %} +{% set previous_cycle_YMD = previous_cycle | to_YMD %} +{% set previous_cycle_HH = previous_cycle | strftime("%H") %} +{% set p_prefix = previous_cycle | strftime("%Y%m%d.%H0000") %} +{% set m_prefix = model_start_date_current_cycle | strftime("%Y%m%d.%H0000") %} + +# Set first/last mem for loop +# --------------------------- +{% set first_mem = 0 %} +{% set last_mem = NMEM_ENS %} + +# Declare to-be-filled lists of member COM directories +# ---------------------------------------------------- +{% set COMOUT_ATMOS_INPUT_MEM_list = [] %} +{% set COMOUT_ATMOS_RESTART_PREV_MEM_list = [] %} +{% set COMOUT_ATMOS_ANALYSIS_MEM_list = [] %} +{% set COMOUT_ICE_ANALYSIS_MEM_list = [] %} +{% set COMOUT_ICE_RESTART_PREV_MEM_list = [] %} +{% set COMOUT_OCEAN_RESTART_PREV_MEM_list = [] %} +{% set COMOUT_OCEAN_ANALYSIS_MEM_list = [] %} +{% set COMOUT_MED_RESTART_PREV_MEM_list = [] %} +{% set COMOUT_WAVE_RESTART_PREV_MEM_list = [] %} + +# Construct member COM directory lists +# ------------------------------------ +{% for mem in range(first_mem, last_mem + 1) %} + + {% set current_cycle_dict = ({ '${ROTDIR}':ROTDIR, + '${RUN}':RUN, + 
'${YMD}':current_cycle_YMD, + '${HH}':current_cycle_HH, + '${MEMDIR}': 'mem%03d' | format(mem) }) %} + {% set previous_cycle_dict = ({ '${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':previous_cycle_YMD, + '${HH}':previous_cycle_HH, + '${MEMDIR}': 'mem%03d' | format(mem) }) %} + + {% set COMOUT_ATMOS_INPUT_MEM = COM_ATMOS_INPUT_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_ATMOS_RESTART_PREV_MEM = COM_ATMOS_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_ICE_ANALYSIS_MEM = COM_ICE_ANALYSIS_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_ICE_RESTART_PREV_MEM = COM_ICE_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_OCEAN_RESTART_PREV_MEM = COM_OCEAN_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_OCEAN_ANALYSIS_MEM = COM_OCEAN_ANALYSIS_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_MED_RESTART_PREV_MEM = COM_MED_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_WAVE_RESTART_PREV_MEM = COM_WAVE_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + + # Append the member COM directories + {% do COMOUT_ATMOS_INPUT_MEM_list.append(COMOUT_ATMOS_INPUT_MEM)%} + {% do COMOUT_ATMOS_RESTART_PREV_MEM_list.append(COMOUT_ATMOS_RESTART_PREV_MEM)%} + {% do COMOUT_ATMOS_ANALYSIS_MEM_list.append(COMOUT_ATMOS_ANALYSIS_MEM)%} + {% do COMOUT_ICE_ANALYSIS_MEM_list.append(COMOUT_ICE_ANALYSIS_MEM)%} + {% do COMOUT_ICE_RESTART_PREV_MEM_list.append(COMOUT_ICE_RESTART_PREV_MEM)%} + {% do COMOUT_OCEAN_RESTART_PREV_MEM_list.append(COMOUT_OCEAN_RESTART_PREV_MEM)%} + {% do COMOUT_OCEAN_ANALYSIS_MEM_list.append(COMOUT_OCEAN_ANALYSIS_MEM)%} + {% do COMOUT_MED_RESTART_PREV_MEM_list.append(COMOUT_MED_RESTART_PREV_MEM)%} + {% do COMOUT_WAVE_RESTART_PREV_MEM_list.append(COMOUT_WAVE_RESTART_PREV_MEM)%} + +{% endfor %} + +################################################################### +# 
Initial condition to stage - include components based on switches +################################################################### + +{% if EXP_WARM_START %} +{% filter indent(width=4) %} +{% include "atmosphere_warm.yaml.j2" %} +{% endfilter %} +{% else %} # cold start +{% filter indent(width=4) %} +{% include "atmosphere_cold.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if REPLAY_ICS %} +{% filter indent(width=4) %} +{% include "atmosphere_perturbation.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_ICE %} +{% filter indent(width=4) %} +{% include "ice.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_OCN %} +{% filter indent(width=4) %} +{% include "ocean.yaml.j2" %} +{% endfilter %} +{% if DO_JEDIOCNVAR %} +{% filter indent(width=4) %} +{% include "ocean_rerun.yaml.j2" %} +{% endfilter %} +{% endif %} +{% if REPLAY_ICS %} +{% filter indent(width=4) %} +{% include "ocean_replay.yaml.j2" %} +{% endfilter %} +{% endif %} +{% if EXP_WARM_START %} +{% filter indent(width=4) %} +{% include "ocean_mediator.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} # DO_OCN + +{% if DO_WAVE %} +{% filter indent(width=4) %} +{% include "wave.yaml.j2" %} +{% endfilter %} +{% endif %} diff --git a/parm/stage/master_gfs.yaml.j2 b/parm/stage/master_gfs.yaml.j2 new file mode 100644 index 0000000000..5204221c9b --- /dev/null +++ b/parm/stage/master_gfs.yaml.j2 @@ -0,0 +1,189 @@ +################################################################### +# This is the master yaml for the GFS +# +# Cycle, member, and RUN settings are set before including each +# component yaml based on DO switches +# +# The included yamls are intended to be of the following structure: +# key1: +# mkdir: +# - "COM directory to create" +# copy: +# - ["source_file", "destination_file"] +# key2: +# mkdir: +# - "COM directory to create" +# copy: +# - ["source_file", "destination_file"] +# +# Any number of keys with nested mkdir and copy are permitted +# Jinja is permitted in this yaml, as long 
as the keys are: +# - COMOUT_ +# - DO_ATM, DO_OCN, DO_ICE, etc. +# For a full list see scripts/exglobal_stage_ic.py +################################################################### + +# Set cycle date variables +# ------------------------ +{% set half_window = assim_freq // 2 %} +{% set half_window_begin = (-half_window | string + "H") | to_timedelta %} +{% set half_window_end = (half_window | string + "H") | to_timedelta %} +{% if DOIAU and MODE == "cycled" %} + {% set model_start_date_current_cycle = current_cycle | add_to_datetime(half_window_begin) %} +{% else %} + {% if REPLAY_ICS %} + {% set model_start_date_current_cycle = current_cycle | add_to_datetime(half_window_end) %} + {% else %} + {% set model_start_date_current_cycle = current_cycle %} + {% endif %} +{% endif %} + +{% set current_cycle_YMD = current_cycle | to_YMD %} +{% set current_cycle_HH = current_cycle | strftime("%H") %} +{% set previous_cycle_YMD = previous_cycle | to_YMD %} +{% set previous_cycle_HH = previous_cycle | strftime("%H") %} +{% set p_prefix = previous_cycle | strftime("%Y%m%d.%H0000") %} +{% set m_prefix = model_start_date_current_cycle | strftime("%Y%m%d.%H0000") %} + +# Determine restart RUN +# --------------------- +{% set rRUN = RUN %} +{% if RUN == "gfs" %} + {% set rRUN = "gdas" %} +{% endif %} + +# Set first/last mem for loop +# --------------------------- +{% if RUN == "enkfgdas" %} # Ensemble RUN + {% set first_mem = 1 %} + {% set last_mem = NMEM_ENS %} +{% else %} # Deterministic RUN + {% set first_mem = -1 %} + {% set last_mem = -1 %} +{% endif %} + +# Declare to-be-filled lists of member COM directories +# ---------------------------------------------------- +{% set COMOUT_ATMOS_INPUT_MEM_list = [] %} +{% set COMOUT_ATMOS_RESTART_PREV_MEM_list = [] %} +{% set COMOUT_ATMOS_ANALYSIS_MEM_list = [] %} +{% set COMOUT_ICE_ANALYSIS_MEM_list = [] %} +{% set COMOUT_ICE_RESTART_PREV_MEM_list = [] %} +{% set COMOUT_OCEAN_RESTART_PREV_MEM_list = [] %} +{% set 
COMOUT_OCEAN_ANALYSIS_MEM_list = [] %} +{% set COMOUT_MED_RESTART_PREV_MEM_list = [] %} +{% set COMOUT_WAVE_RESTART_PREV_MEM_list = [] %} + +# Construct member COM directory lists +# ------------------------------------ +{% for mem in range(first_mem, last_mem + 1) %} + + {% if mem >= 0 %} + {% set mem_char = 'mem%03d' | format(mem) %} + {% else %} + {% set mem_char = '' %} + {% endif %} + + {% set current_cycle_dict = ({ '${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle_YMD, + '${HH}':current_cycle_HH, + '${MEMDIR}': mem_char }) %} + {% set previous_cycle_dict = ({ '${ROTDIR}':ROTDIR, + '${RUN}':rRUN, + '${YMD}':previous_cycle_YMD, + '${HH}':previous_cycle_HH, + '${MEMDIR}': mem_char }) %} + {% set previous_cycle_and_run_dict = ({ '${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':previous_cycle_YMD, + '${HH}':previous_cycle_HH, + '${MEMDIR}': mem_char }) %} + + {% set COMOUT_ATMOS_INPUT_MEM = COM_ATMOS_INPUT_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_ATMOS_RESTART_PREV_MEM = COM_ATMOS_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_ICE_ANALYSIS_MEM = COM_ICE_ANALYSIS_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_ICE_RESTART_PREV_MEM = COM_ICE_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_OCEAN_RESTART_PREV_MEM = COM_OCEAN_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_OCEAN_ANALYSIS_MEM = COM_OCEAN_ANALYSIS_TMPL | replace_tmpl(current_cycle_dict) %} + {% set COMOUT_MED_RESTART_PREV_MEM = COM_MED_RESTART_TMPL | replace_tmpl(previous_cycle_dict) %} + {% set COMOUT_WAVE_RESTART_PREV_MEM = COM_WAVE_RESTART_TMPL | replace_tmpl(previous_cycle_and_run_dict) %} + + # Append the member COM directories + {% do COMOUT_ATMOS_INPUT_MEM_list.append(COMOUT_ATMOS_INPUT_MEM)%} + {% do COMOUT_ATMOS_RESTART_PREV_MEM_list.append(COMOUT_ATMOS_RESTART_PREV_MEM)%} + {% do 
COMOUT_ATMOS_ANALYSIS_MEM_list.append(COMOUT_ATMOS_ANALYSIS_MEM)%} + {% do COMOUT_ICE_ANALYSIS_MEM_list.append(COMOUT_ICE_ANALYSIS_MEM)%} + {% do COMOUT_ICE_RESTART_PREV_MEM_list.append(COMOUT_ICE_RESTART_PREV_MEM)%} + {% do COMOUT_OCEAN_RESTART_PREV_MEM_list.append(COMOUT_OCEAN_RESTART_PREV_MEM)%} + {% do COMOUT_OCEAN_ANALYSIS_MEM_list.append(COMOUT_OCEAN_ANALYSIS_MEM)%} + {% do COMOUT_MED_RESTART_PREV_MEM_list.append(COMOUT_MED_RESTART_PREV_MEM)%} + {% do COMOUT_WAVE_RESTART_PREV_MEM_list.append(COMOUT_WAVE_RESTART_PREV_MEM)%} + +{% endfor %} + +################################################################### +# Initial condition to stage - include components based on switches +################################################################### + +{% if MODE == "cycled" %} +{% filter indent(width=4) %} +{% include "analysis.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if EXP_WARM_START %} +{% filter indent(width=4) %} +{% include "atmosphere_warm.yaml.j2" %} +{% endfilter %} +{% else %} # cold start +{% filter indent(width=4) %} +{% include "atmosphere_cold.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_NEST %} +{% filter indent(width=4) %} +{% include "atmosphere_nest.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if REPLAY_ICS %} +{% filter indent(width=4) %} +{% include "atmosphere_perturbation.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_ICE %} +{% filter indent(width=4) %} +{% include "ice.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_OCN %} +{% filter indent(width=4) %} +{% include "ocean.yaml.j2" %} +{% endfilter %} +{% if DO_JEDIOCNVAR %} +{% filter indent(width=4) %} +{% include "ocean_rerun.yaml.j2" %} +{% endfilter %} +{% endif %} +{% if REPLAY_ICS %} +{% filter indent(width=4) %} +{% include "ocean_replay.yaml.j2" %} +{% endfilter %} +{% endif %} +{% if EXP_WARM_START %} +{% filter indent(width=4) %} +{% include "ocean_mediator.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} + +{% if DO_WAVE %} +{% filter 
indent(width=4) %} +{% include "wave.yaml.j2" %} +{% endfilter %} +{% endif %} diff --git a/parm/stage/ocean.yaml.j2 b/parm/stage/ocean.yaml.j2 new file mode 100644 index 0000000000..b57c36d4ac --- /dev/null +++ b/parm/stage/ocean.yaml.j2 @@ -0,0 +1,18 @@ +ocean: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_OCEAN_RESTART_PREV_MEM = COMOUT_OCEAN_RESTART_PREV_MEM_list[imem] %} + - "{{ COMOUT_OCEAN_RESTART_PREV_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_OCEAN_RESTART_PREV_MEM = COMOUT_OCEAN_RESTART_PREV_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.MOM.res.nc", "{{ COMOUT_OCEAN_RESTART_PREV_MEM }}"] + {% if OCNRES == "025" %} + {% for nn in range(1, 4) %} + - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.MOM.res_{{ nn }}.nc", "{{ COMOUT_OCEAN_RESTART_PREV_MEM }}"] + {% endfor %} + {% endif %} + {% endfor %} # mem loop diff --git a/parm/stage/ocean_mediator.yaml.j2 b/parm/stage/ocean_mediator.yaml.j2 new file mode 100644 index 0000000000..c986b2e746 --- /dev/null +++ b/parm/stage/ocean_mediator.yaml.j2 @@ -0,0 +1,15 @@ +{% if path_exists(ICSDIR ~ "/" ~ COMOUT_MED_RESTART_PREV_MEM_list[0] | relpath(ROTDIR) ~ "/" ~ m_prefix ~ ".ufs.cpld.cpl.r.nc") %} +ocean_mediator: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_MED_RESTART_PREV_MEM = COMOUT_MED_RESTART_PREV_MEM_list[imem] %} + - "{{ COMOUT_MED_RESTART_PREV_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_MED_RESTART_PREV_MEM = COMOUT_MED_RESTART_PREV_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_MED_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.ufs.cpld.cpl.r.nc", "{{ COMOUT_MED_RESTART_PREV_MEM 
}}"] + {% endfor %} # mem loop +{% endif %} # path exists diff --git a/parm/stage/ocean_replay.yaml.j2 b/parm/stage/ocean_replay.yaml.j2 new file mode 100644 index 0000000000..8b52108bec --- /dev/null +++ b/parm/stage/ocean_replay.yaml.j2 @@ -0,0 +1,13 @@ +ocean_replay: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_OCEAN_ANALYSIS_MEM = COMOUT_OCEAN_ANALYSIS_MEM_list[imem] %} + - "{{ COMOUT_OCEAN_ANALYSIS_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_OCEAN_ANALYSIS_MEM = COMOUT_OCEAN_ANALYSIS_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.mom6_perturbation.nc", "{{ COMOUT_OCEAN_ANALYSIS_MEM }}/mom6_increment.nc"] + {% endfor %} # mem loop diff --git a/parm/stage/ocean_rerun.yaml.j2 b/parm/stage/ocean_rerun.yaml.j2 new file mode 100644 index 0000000000..8b4042d730 --- /dev/null +++ b/parm/stage/ocean_rerun.yaml.j2 @@ -0,0 +1,13 @@ +ocean_rerun: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_OCEAN_ANALYSIS_MEM = COMOUT_OCEAN_ANALYSIS_MEM_list[imem] %} + - "{{ COMOUT_OCEAN_ANALYSIS_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_OCEAN_ANALYSIS_MEM = COMOUT_OCEAN_ANALYSIS_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ RUN }}.t{{ current_cycle_HH }}z.ocninc.nc", "{{ COMOUT_OCEAN_ANALYSIS_MEM }}"] + {% endfor %} # mem loop diff --git a/parm/stage/wave.yaml.j2 b/parm/stage/wave.yaml.j2 new file mode 100644 index 0000000000..d610430bc7 --- /dev/null +++ b/parm/stage/wave.yaml.j2 @@ -0,0 +1,13 @@ +wave: + mkdir: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_WAVE_RESTART_PREV_MEM = 
COMOUT_WAVE_RESTART_PREV_MEM_list[imem] %} + - "{{ COMOUT_WAVE_RESTART_PREV_MEM }}" + {% endfor %} # mem loop + copy: + {% for mem in range(first_mem, last_mem + 1) %} + {% set imem = mem - first_mem %} + {% set COMOUT_WAVE_RESTART_PREV_MEM = COMOUT_WAVE_RESTART_PREV_MEM_list[imem] %} + - ["{{ ICSDIR }}/{{ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.restart.{{ waveGRD }}", "{{ COMOUT_WAVE_RESTART_PREV_MEM }}"] + {% endfor %} # mem loop diff --git a/parm/ufs/fv3/diag_table b/parm/ufs/fv3/diag_table index dad8b6fac6..f44bfd82a4 100644 --- a/parm/ufs/fv3/diag_table +++ b/parm/ufs/fv3/diag_table @@ -77,6 +77,7 @@ #"gfs_dyn", "pfhy", "preshy", "fv3_history", "all", .false., "none", 2 #"gfs_dyn", "pfnh", "presnh", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "omga", "omga", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 "gfs_phys", "refl_10cm", "refl_10cm", "fv3_history", "all", .false., "none", 2 diff --git a/parm/ufs/fv3/diag_table_da b/parm/ufs/fv3/diag_table_da index 5e7149663a..339b6a42a5 100644 --- a/parm/ufs/fv3/diag_table_da +++ b/parm/ufs/fv3/diag_table_da @@ -30,6 +30,7 @@ #"gfs_dyn", "pfhy", "preshy", "fv3_history", "all", .false., "none", 2 #"gfs_dyn", "pfnh", "presnh", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "omga", "omga", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 diff --git a/parm/ufs/gocart/ExtData.other b/parm/ufs/gocart/ExtData.other index 7a0d63d6ca..5d2ddc5102 100644 --- a/parm/ufs/gocart/ExtData.other +++ b/parm/ufs/gocart/ExtData.other @@ -17,12 +17,12 @@ DU_UTHRES '1' Y E - none none uthres ExtData/n #====== Sulfate 
Sources ================================================= # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -SU_ANTHROL1 NA N Y %y4-%m2-%d2t12:00:00 none none SO2 ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -SU_ANTHROL2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_ANTHROL1 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2 ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_ANTHROL2 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Ship emissions -SU_SHIPSO2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -SU_SHIPSO4 NA N Y %y4-%m2-%d2t12:00:00 none none SO4_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_SHIPSO2 NA Y Y %y4-%m2-%d2t12:00:00 none none SO2_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +SU_SHIPSO4 NA Y Y %y4-%m2-%d2t12:00:00 none none SO4_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption SU_AIRCRAFT NA Y Y %y4-%m2-%d2t12:00:00 none none none /dev/null @@ -63,11 +63,11 @@ OC_MTPO NA Y Y %y4-%m2-%d2t12:00:00 none none mtpo ExtData/nexus/MEGAN_ OC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -OC_ANTEOC1 NA N Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -OC_ANTEOC2 NA N Y %y4-%m2-%d2t12:00:00 none none OC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_ANTEOC1 NA Y Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_ANTEOC2 NA Y Y %y4-%m2-%d2t12:00:00 none none OC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # EDGAR based ship emissions -OC_SHIP NA N Y 
%y4-%m2-%d2t12:00:00 none none OC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +OC_SHIP NA Y Y %y4-%m2-%d2t12:00:00 none none OC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption OC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null @@ -88,11 +88,11 @@ pSOA_ANTHRO_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null BC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null # Anthropogenic (BF & FF) emissions -- allowed to input as two layers -BC_ANTEBC1 NA N Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc -BC_ANTEBC2 NA N Y %y4-%m2-%d2t12:00:00 none none BC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_ANTEBC1 NA Y Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_ANTEBC2 NA Y Y %y4-%m2-%d2t12:00:00 none none BC_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # EDGAR based ship emissions -BC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none BC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc +BC_SHIP NA Y Y %y4-%m2-%d2t12:00:00 none none BC_ship ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc # Aircraft fuel consumption BC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none bc_aviation /dev/null diff --git a/scripts/exgdas_aero_analysis_generate_bmatrix.py b/scripts/exgdas_aero_analysis_generate_bmatrix.py new file mode 100755 index 0000000000..0d8389c40d --- /dev/null +++ b/scripts/exgdas_aero_analysis_generate_bmatrix.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# exgdas_aero_analysis_generate_bmatrix.py +# This script creates an AerosolBMatrix object +# and runs the methods needed +# to stage files, compute the variance, and write to com +# files needed for the variational solver +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.aero_bmatrix 
import AerosolBMatrix + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the aerosol variance and diffusion correlation tasks + AeroB = AerosolBMatrix(config) + AeroB.initialize() + AeroB.interpBackground() + AeroB.computeVariance() + AeroB.computeDiffusion() + AeroB.finalize() diff --git a/scripts/exgdas_enkf_earc.py b/scripts/exgdas_enkf_earc.py index a515ec9746..c724bdbd67 100755 --- a/scripts/exgdas_enkf_earc.py +++ b/scripts/exgdas_enkf_earc.py @@ -28,11 +28,13 @@ def main(): 'DOHYBVAR', 'DOIAU_ENKF', 'IAU_OFFSET', 'DOIAU', 'DO_CALC_INCREMENT', 'assim_freq', 'ARCH_CYC', 'ARCH_WARMICFREQ', 'ARCH_FCSTICFREQ', - 'IAUFHRS_ENKF'] + 'IAUFHRS_ENKF', 'NET'] archive_dict = AttrDict() for key in keys: - archive_dict[key] = archive.task_config[key] + archive_dict[key] = archive.task_config.get(key) + if archive_dict[key] is None: + print(f"Warning: key ({key}) not found in task_config!") # Also import all COMIN* directory and template variables for key in archive.task_config.keys(): diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 2720dd5d5f..1944325317 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -68,16 +68,6 @@ export DELTSFC=${DELTSFC:-6} APRUN_ESFC=${APRUN_ESFC:-${APRUN:-""}} NTHREADS_ESFC=${NTHREADS_ESFC:-${NTHREADS:-1}} -################################################################################ -# Preprocessing -mkdata=NO -if [ ! -d $DATA ]; then - mkdata=YES - mkdir -p $DATA -fi -cd $DATA || exit 99 - - ################################################################################ # Update surface fields in the FV3 restart's using global_cycle. 
@@ -137,6 +127,7 @@ if [ $DOIAU = "YES" ]; then export TILE_NUM=$n + # Copy inputs from COMIN to DATA for imem in $(seq 1 $NMEM_ENS); do smem=$((imem + mem_offset)) if (( smem > NMEM_ENS_MAX )); then @@ -150,24 +141,31 @@ if [ $DOIAU = "YES" ]; then COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL + + # determine where the input snow restart files come from + if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then + sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" + else + sfcdata_dir="${COMIN_ATMOS_RESTART_MEM_PREV}" + fi + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" - ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ - "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" - ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + ${NCP} "${sfcdata_dir}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - ${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \ - "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" if [[ ${GSI_SOILANAL} = "YES" ]]; then FHR=6 - ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + ${NCP} 
"${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ "${DATA}/lnd_incr.${cmem}" fi done # ensembles @@ -175,6 +173,33 @@ if [ $DOIAU = "YES" ]; then CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk + # Copy outputs from DATA to COMOUT + for imem in $(seq 1 $NMEM_ENS); do + smem=$((imem + mem_offset)) + if (( smem > NMEM_ENS_MAX )); then + smem=$((smem - NMEM_ENS_MAX)) + fi + gmemchar="mem"$(printf %03i "$smem") + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + cpfs "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + + + if [[ ${GSI_SOILANAL} = "YES" ]]; then + FHR=6 + ${NCP} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + "${DATA}/lnd_incr.${cmem}" + fi + done # ensembles + done fi @@ -184,6 +209,7 @@ if [ $DOSFCANL_ENKF = "YES" ]; then export TILE_NUM=$n + # Copy inputs from COMIN to DATA for imem in $(seq 1 $NMEM_ENS); do smem=$((imem + mem_offset)) if (( smem > NMEM_ENS_MAX )); then @@ -193,28 +219,49 @@ if [ $DOSFCANL_ENKF = "YES" ]; then cmem=$(printf %03i $imem) memchar="mem$cmem" - MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL - [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + # determine where the input snow restart files come from + if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then + 
sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" + else + sfcdata_dir="${COMIN_ATMOS_RESTART_MEM_PREV}" + fi - ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ - "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" - ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + ${NCP} "${sfcdata_dir}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - ${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \ - "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk + # Copy outputs from DATA to COMOUT + for imem in $(seq 1 "${NMEM_ENS}"); do + smem=$((imem + mem_offset)) + if (( smem > NMEM_ENS_MAX )); then + smem=$((smem - NMEM_ENS_MAX)) + fi + gmemchar="mem"$(printf %03i "${smem}") + cmem=$(printf %03i "${imem}") + memchar="mem${cmem}" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + [[ ! 
-d "${COM_ATMOS_RESTART_MEM}" ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + + cpfs "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + + done + done fi @@ -222,8 +269,7 @@ fi ################################################################################ # Postprocessing -cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATA +cd "${pwd}" || exit 1 -exit $err +exit ${err} diff --git a/scripts/exgdas_enkf_snow_recenter.py b/scripts/exgdas_enkf_snow_recenter.py new file mode 100755 index 0000000000..fcd501860c --- /dev/null +++ b/scripts/exgdas_enkf_snow_recenter.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# exgdas_enkf_snow_recenter.py +# This script creates an SnowEnsAnalysis class +# and will recenter the ensemble mean to the +# deterministic analysis and provide increments +# to create an ensemble of snow analyses +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.snowens_analysis import SnowEnsAnalysis + +# Initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the snow ensemble analysis task + anl = SnowEnsAnalysis(config) + anl.initialize() + anl.genWeights() + anl.genMask() + anl.regridDetBkg() + anl.regridDetInc() + anl.recenterEns() + anl.addEnsIncrements() + anl.finalize() diff --git a/scripts/exgfs_aero_init_aerosol.py b/scripts/exgfs_aero_init_aerosol.py index d098368202..aed6b88647 100755 --- a/scripts/exgfs_aero_init_aerosol.py +++ b/scripts/exgfs_aero_init_aerosol.py @@ -41,10 +41,10 @@ from functools import partial # Constants -atm_base_pattern = "{rot_dir}/{run}.%Y%m%d/%H/model_data/atmos/input" # Location of atmosphere ICs +atm_base_pattern = "{rot_dir}/{run}.%Y%m%d/%H/model/atmos/input" # Location of atmosphere ICs atm_file_pattern = 
"{path}/gfs_data.{tile}.nc" # Atm IC file names atm_ctrl_pattern = "{path}/gfs_ctrl.nc" # Atm IC control file name -restart_base_pattern = "{rot_dir}/{run}.%Y%m%d/%H/model_data/atmos/restart" # Location of restart files (time of previous run) +restart_base_pattern = "{rot_dir}/{run}.%Y%m%d/%H/model/atmos/restart" # Location of restart files (time of previous run) restart_file_pattern = "{file_base}/{timestamp}fv_core.res.{tile}.nc" # Name of restart data files (time when restart is valid) tracer_file_pattern = "{file_base}/{timestamp}fv_tracer.res.{tile}.nc" # Name of restart tracer files (time when restart is valid) dycore_file_pattern = "{file_base}/{timestamp}fv_core.res.nc" # Name of restart dycore file (time when restart is valid) diff --git a/scripts/exgfs_atmos_postsnd.sh b/scripts/exgfs_atmos_postsnd.sh index caf5443a50..8f2aa43568 100755 --- a/scripts/exgfs_atmos_postsnd.sh +++ b/scripts/exgfs_atmos_postsnd.sh @@ -18,11 +18,16 @@ # 7) 2018-07-18 Guang Ping Lou Generalize this version to other platforms # 8) 2019-10-18 Guang Ping Lou Transition to reading in NetCDF model data # 9) 2019-12-18 Guang Ping Lou generalizing to reading in NetCDF or nemsio +# 10) 2024-08-08 Bo Cui Update to handle one forecast at a time +# For GFSv17 bufr, total number of forecast hours is 141(num_hours=141) +# it requires 7 nodes & allocate 21 processes per node(num_ppn=21) ################################################################ source "${USHgfs}/preamble.sh" -cd $DATA +runscript=${USHgfs}/gfs_bufr.sh + +cd "${DATA}" || exit 2 ######################################## @@ -44,47 +49,109 @@ export NINT3=${FHOUT_GFS:-3} rm -f -r "${COM_ATMOS_BUFR}" mkdir -p "${COM_ATMOS_BUFR}" + GETDIM="${USHgfs}/getncdimlen" LEVS=$(${GETDIM} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf000.${atmfm}" pfull) declare -x LEVS -### Loop for the hour and wait for the sigma and surface flux file: -export FSTART=$STARTHOUR -sleep_interval=10 -max_tries=360 -# -while [ $FSTART -lt $ENDHOUR ] -do 
-export FINT=$NINT1 - # Define the end hour for the input - export FEND=$(expr $FSTART + $INCREMENT) - if test $FEND -lt 100; then FEND=0$FEND; fi - if [ $FSTART -eq 00 ] - then - export F00FLAG=YES - else - export F00FLAG=NO - fi - - if [ $FEND -eq $ENDHOUR ] - then - export MAKEBUFR=YES - fi - - filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${FEND}.${logfm}" - if ! wait_for_file "${filename}" "${sleep_interval}" "${max_tries}"; then - err_exit "FATAL ERROR: logf${FEND} not found after waiting $((sleep_interval * ( max_tries - 1) )) secs" - fi +# Initialize an empty list to store the hours +hour_list=() -## 1-hourly output before $NEND1, 3-hourly output after - if [[ $((10#$FEND)) -gt $((10#$NEND1)) ]]; then - export FINT=$NINT3 - fi - ${USHgfs}/gfs_bufr.sh - - export FSTART="${FEND}" +# Generate hours from 0 to NEND1 with interval NINT1 +for (( hour=0; hour<=NEND1 && hour<=ENDHOUR; hour+=NINT1 )); do + hour_list+=("$(printf "%03d" "$hour")") +done + +# Generate hours from NEND1 + NINT3 to ENDHOUR with interval NINT3 +for (( hour=NEND1+NINT3; hour<=ENDHOUR; hour+=NINT3 )); do + hour_list+=("$(printf "%03d" "$hour")") +done + +# Print the hour list +echo "Hour List:" "${hour_list[@]}" + +# Count the number of elements in the hour_list +export ntasks="${#hour_list[@]}" + +# Print the total number of hours +echo "Total number of hours: $ntasks" + +# allocate 21 processes per node +# don't allocate more processes, or it might have memory issue +#export tasks_per_node=21 +#export APRUN="mpiexec -np ${ntasks} -ppn ${tasks_per_node} --cpu-bind core cfp " + +if [ -s "${DATA}/poescript_bufr" ]; then + rm ${DATA}/poescript_bufr +fi + +for fhr in "${hour_list[@]}"; do + + if [ ! 
-s "${DATA}/${fhr}" ]; then mkdir -p ${DATA}/${fhr}; fi + export FINT=${NINT1} + ## 1-hourly output before $NEND1, 3-hourly output after + if [[ $((10#${fhr})) -gt $((10#${NEND1})) ]]; then + export FINT=${NINT3} + fi + if [[ $((10#${fhr})) -eq 0 ]]; then + export F00FLAG="YES" + else + export F00FLAG="NO" + fi + + # Convert fhr to integer + fhr_int=$((10#$fhr)) + + # Get previous hour + if (( fhr_int == STARTHOUR )); then + fhr_p=${fhr_int} + else + fhr_p=$(( fhr_int - FINT )) + fi + + # Format fhr_p with leading zeros + fhr_p="$(printf "%03d" "$fhr_p")" + + filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${fhr}.${logfm}" + if [[ -z ${filename} ]]; then + echo "File ${filename} is required but not found." + err_exit "FATAL ERROR: logf${fhr} not found." + else + echo "${runscript} \"${fhr}\" \"${fhr_p}\" \"${FINT}\" \"${F00FLAG}\" \"${DATA}/${fhr}\"" >> "${DATA}/poescript_bufr" + fi done +# Run with MPMD +"${USHgfs}/run_mpmd.sh" "${DATA}/poescript_bufr" + +cd "${DATA}" || exit 2 + +# Initialize fortnum +fortnum=20 + +# Loop through each element in the array +for fhr in "${hour_list[@]}"; do + # Increment fortnum + fortnum=$((fortnum + 1)) + ${NLN} "${DATA}/${fhr}/fort.${fortnum}" "fort.${fortnum}" +done + +# start to generate bufr products at fhr=${ENDHOUR} + +export MAKEBUFR=YES +export fhr="$(printf "%03d" "$ENDHOUR")" +export FINT=${NINT1} +## 1-hourly output before $NEND1, 3-hourly output after +if [[ $((10#${fhr})) -gt $((10#${NEND1})) ]]; then + export FINT=${NINT3} +fi +if [[ $((10#${fhr})) -eq 0 ]]; then + export F00FLAG="YES" +else + export F00FLAG="NO" +fi +${runscript} "${fhr}" "${fhr_p}" "${FINT}" "${F00FLAG}" "${DATA}" + ############################################################## # Tar and gzip the individual bufr files and send them to /com ############################################################## @@ -105,7 +172,7 @@ fi # add appropriate WMO Headers. 
######################################## rm -rf poe_col -for (( m = 1; m <= NUM_SND_COLLECTIVES ; m++ )); do +for (( m = 1; m <= NUM_SND_COLLECTIVES; m++ )); do echo "sh ${USHgfs}/gfs_sndp.sh ${m} " >> poe_col done @@ -123,4 +190,5 @@ ${APRUN_POSTSNDCFP} cmdfile sh "${USHgfs}/gfs_bfr2gpk.sh" + ############## END OF SCRIPT ####################### diff --git a/scripts/exglobal_aero_analysis_run.py b/scripts/exglobal_aero_analysis_variational.py similarity index 84% rename from scripts/exglobal_aero_analysis_run.py rename to scripts/exglobal_aero_analysis_variational.py index 85f4b963a4..dd5bb4f65a 100755 --- a/scripts/exglobal_aero_analysis_run.py +++ b/scripts/exglobal_aero_analysis_variational.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 -# exglobal_aero_analysis_run.py +# exglobal_aero_analysis_variational.py # This script creates an AerosolAnalysis object -# and runs the execute method +# and runs the variational method # which executes the global aerosol variational analysis import os @@ -19,4 +19,4 @@ # Instantiate the aerosol analysis task AeroAnl = AerosolAnalysis(config) - AeroAnl.execute() + AeroAnl.variational() diff --git a/scripts/exglobal_archive.py b/scripts/exglobal_archive.py index ec8154317f..793fa1c1ac 100755 --- a/scripts/exglobal_archive.py +++ b/scripts/exglobal_archive.py @@ -31,16 +31,20 @@ def main(): 'restart_interval_gdas', 'restart_interval_gfs', 'AERO_ANL_RUN', 'AERO_FCST_RUN', 'DOIBP_WAV', 'DO_JEDIOCNVAR', 'NMEM_ENS', 'DO_JEDIATMVAR', 'DO_VRFY_OCEANDA', 'FHMAX_FITS', - 'IAUFHRS', 'DO_FIT2OBS'] + 'IAUFHRS', 'DO_FIT2OBS', 'NET'] archive_dict = AttrDict() for key in keys: - archive_dict[key] = archive.task_config[key] + archive_dict[key] = archive.task_config.get(key) + if archive_dict[key] is None: + print(f"Warning: key ({key}) not found in task_config!") # Also import all COMIN* and COMOUT* directory and template variables for key in archive.task_config.keys(): - if key.startswith("COMIN_") or key.startswith("COMOUT_"): - archive_dict[key] 
= archive.task_config[key] + if key.startswith("COM_") or key.startswith("COMIN_") or key.startswith("COMOUT_"): + archive_dict[key] = archive.task_config.get(key) + if archive_dict[key] is None: + print(f"Warning: key ({key}) not found in task_config!") cwd = os.getcwd() diff --git a/scripts/exglobal_atm_analysis_finalize.py b/scripts/exglobal_atm_analysis_finalize.py index 3f4313631c..35220928c9 100755 --- a/scripts/exglobal_atm_analysis_finalize.py +++ b/scripts/exglobal_atm_analysis_finalize.py @@ -21,4 +21,6 @@ # Instantiate the atm analysis task AtmAnl = AtmAnalysis(config) + + # Finalize JEDI variational analysis AtmAnl.finalize() diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index 66f6796343..72413ddbd4 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # exglobal_atm_analysis_fv3_increment.py # This script creates an AtmAnalysis object -# and runs the init_fv3_increment and fv3_increment methods +# and runs the initialize_fv3inc and execute methods # which convert the JEDI increment into an FV3 increment import os @@ -17,7 +17,9 @@ # Take configuration from environment and cast it as python dictionary config = cast_strdict_as_dtypedict(os.environ) - # Instantiate the atm analysis task - AtmAnl = AtmAnalysis(config) - AtmAnl.init_fv3_increment() - AtmAnl.fv3_increment() + # Instantiate the atm analysis object + AtmAnl = AtmAnalysis(config, 'atmanlfv3inc') + + # Initialize and execute FV3 increment converter + AtmAnl.initialize_jedi() + AtmAnl.execute(config.APRUN_ATMANLFV3INC) diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py index 1793b24b0b..9deae07bb3 100755 --- a/scripts/exglobal_atm_analysis_initialize.py +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -20,5 +20,8 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atm 
analysis task - AtmAnl = AtmAnalysis(config) - AtmAnl.initialize() + AtmAnl = AtmAnalysis(config, 'atmanlvar') + + # Initialize JEDI variational analysis + AtmAnl.initialize_jedi() + AtmAnl.initialize_analysis() diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index 07bc208331..8359532069 100755 --- a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -18,5 +18,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atm analysis task - AtmAnl = AtmAnalysis(config) - AtmAnl.variational() + AtmAnl = AtmAnalysis(config, 'atmanlvar') + + # Execute JEDI variational analysis + AtmAnl.execute(config.APRUN_ATMANLVAR, ['fv3jedi', 'variational']) diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py index b49cb3c413..d68c260e78 100755 --- a/scripts/exglobal_atmens_analysis_finalize.py +++ b/scripts/exglobal_atmens_analysis_finalize.py @@ -21,4 +21,6 @@ # Instantiate the atmens analysis task AtmEnsAnl = AtmEnsAnalysis(config) + + # Finalize ensemble DA analysis AtmEnsAnl.finalize() diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py index c50b00548f..48eb6a6a1e 100755 --- a/scripts/exglobal_atmens_analysis_fv3_increment.py +++ b/scripts/exglobal_atmens_analysis_fv3_increment.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # exglobal_atmens_analysis_fv3_increment.py # This script creates an AtmEnsAnalysis object -# and runs the init_fv3_increment and fv3_increment methods +# and runs the initialize_fv3inc and execute methods # which convert the JEDI increment into an FV3 increment import os @@ -17,7 +17,9 @@ # Take configuration from environment and cast it as python dictionary config = cast_strdict_as_dtypedict(os.environ) - # Instantiate the atmens analysis task - AtmEnsAnl = AtmEnsAnalysis(config) - AtmEnsAnl.init_fv3_increment() - 
AtmEnsAnl.fv3_increment() + # Instantiate the atmens analysis object + AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlfv3inc') + + # Initialize and execute JEDI FV3 increment converter + AtmEnsAnl.initialize_jedi() + AtmEnsAnl.execute(config.APRUN_ATMENSANLFV3INC) diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py index 1d578b44f2..326fe80628 100755 --- a/scripts/exglobal_atmens_analysis_initialize.py +++ b/scripts/exglobal_atmens_analysis_initialize.py @@ -20,5 +20,11 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atmens analysis task - AtmEnsAnl = AtmEnsAnalysis(config) - AtmEnsAnl.initialize() + if not config.lobsdiag_forenkf: + AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlletkf') + else: + AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs') + + # Initialize JEDI ensemble DA analysis + AtmEnsAnl.initialize_jedi() + AtmEnsAnl.initialize_analysis() diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py index 30394537cd..45b06524fe 100755 --- a/scripts/exglobal_atmens_analysis_letkf.py +++ b/scripts/exglobal_atmens_analysis_letkf.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atmens_analysis_letkf.py # This script creates an AtmEnsAnalysis object -# and runs the letkf method -# which executes the global atm local ensemble analysis +# and runs the execute method which executes +# the global atm local ensemble analysis import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -18,5 +18,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atmens analysis task - AtmEnsAnl = AtmEnsAnalysis(config) - AtmEnsAnl.letkf() + AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlletkf') + + # Execute the JEDI ensemble DA analysis + AtmEnsAnl.execute(config.APRUN_ATMENSANLLETKF, ['fv3jedi', 'localensembleda']) diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py new file mode 100755 index 
0000000000..c701f8cb4e --- /dev/null +++ b/scripts/exglobal_atmens_analysis_obs.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_obs.py +# This script creates an AtmEnsAnalysis object +# and runs the execute method +# which executes the global atm local ensemble analysis in observer mode +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs') + + # Execute JEDI ensemble DA analysis in observer mode + AtmEnsAnl.execute(config.APRUN_ATMENSANLOBS, ['fv3jedi', 'localensembleda']) diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py new file mode 100755 index 0000000000..be78e694b1 --- /dev/null +++ b/scripts/exglobal_atmens_analysis_sol.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_sol.py +# This script creates an AtmEnsAnalysis object +# and runs the execute method +# which executes the global atm local ensemble analysis in solver mode +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlsol') + + # Initialize and execute JEDI ensemble DA analysis in solver mode + AtmEnsAnl.initialize_jedi() + AtmEnsAnl.execute(config.APRUN_ATMENSANLSOL, ['fv3jedi', 'localensembleda']) diff --git 
a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh index 75b1f927bc..73637a0d55 100755 --- a/scripts/exglobal_cleanup.sh +++ b/scripts/exglobal_cleanup.sh @@ -11,14 +11,6 @@ DATAfcst="${DATAROOT}/${RUN}fcst.${PDY:-}${cyc}" if [[ -d "${DATAfcst}" ]]; then rm -rf "${DATAfcst}"; fi #DATAefcs="${DATAROOT}/${RUN}efcs???${PDY:-}${cyc}" rm -rf "${DATAROOT}/${RUN}efcs"*"${PDY:-}${cyc}" - -# In XML, DATAROOT is defined as: -#DATAROOT="${STMP}/RUNDIRS/${PSLOT}/${RUN}.${PDY}${cyc}" -# cleanup is only executed after the entire cycle is successfully completed. -# removing DATAROOT should be possible if that is the case. -rm -rf "${DATAROOT}" - -echo "Cleanup ${DATAROOT} completed!" ############################################################### if [[ "${CLEANUP_COM:-YES}" == NO ]] ; then @@ -49,10 +41,10 @@ function remove_files() { find_exclude_string="${find_exclude_string[*]/%-or}" # Remove all regular files that do not match # shellcheck disable=SC2086 - find "${directory}" -type f -not \( ${find_exclude_string} \) -delete + find "${directory}" -type f -not \( ${find_exclude_string} \) -ignore_readdir_race -delete # Remove all symlinks that do not match # shellcheck disable=SC2086 - find "${directory}" -type l -not \( ${find_exclude_string} \) -delete + find "${directory}" -type l -not \( ${find_exclude_string} \) -ignore_readdir_race -delete # Remove any empty directories find "${directory}" -type d -empty -delete } @@ -113,3 +105,16 @@ if (( GDATE < RDATE )); then fi deletion_target="${ROTDIR}/${RUN}.${RDATE:0:8}" if [[ -d ${deletion_target} ]]; then rm -rf "${deletion_target}"; fi + +# sync and wait to avoid filesystem synchronization issues +sync && sleep 1 + +# Finally, delete DATAROOT. +# This will also delete the working directory, so save it until the end. +# In XML, DATAROOT is defined as: +#DATAROOT="${STMP}/RUNDIRS/${PSLOT}/${RUN}.${PDY}${cyc}" +# cleanup is only executed after the entire cycle is successfully completed. 
+# removing DATAROOT should be possible if that is the case. +rm -rf "${DATAROOT}" + +echo "Cleanup ${DATAROOT} completed!" diff --git a/scripts/exglobal_stage_ic.py b/scripts/exglobal_stage_ic.py new file mode 100755 index 0000000000..d737d83b47 --- /dev/null +++ b/scripts/exglobal_stage_ic.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 + +import os + +from pygfs.task.stage_ic import Stage +from wxflow import AttrDict, Logger, cast_strdict_as_dtypedict, logit + +# Initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + +@logit(logger) +def main(): + + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the Stage object + stage = Stage(config) + + # Pull out all the configuration keys needed to run stage job + keys = ['RUN', 'MODE', 'EXP_WARM_START', 'NMEM_ENS', + 'assim_freq', 'current_cycle', 'previous_cycle', + 'ROTDIR', 'ICSDIR', 'STAGE_IC_YAML_TMPL', 'DO_JEDIATMVAR', + 'OCNRES', 'waveGRD', 'ntiles', 'DOIAU', 'DO_JEDIOCNVAR', + 'REPLAY_ICS', 'DO_WAVE', 'DO_OCN', 'DO_ICE', 'DO_NEST'] + + stage_dict = AttrDict() + for key in keys: + # Make sure OCNRES is three digits + if key == "OCNRES": + stage.task_config.OCNRES = f"{stage.task_config.OCNRES :03d}" + stage_dict[key] = stage.task_config[key] + + # Also import all COM* directory and template variables + for key in stage.task_config.keys(): + if key.startswith("COM"): + stage_dict[key] = stage.task_config[key] + + # Stage ICs + stage.execute_stage(stage_dict) + + +if __name__ == '__main__': + main() diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh deleted file mode 100755 index 32356cd724..0000000000 --- a/scripts/exglobal_stage_ic.sh +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env bash - -source "${USHgfs}/preamble.sh" - -# Locally scoped variables and functions -# shellcheck disable=SC2153 -GDATE=$(date --utc -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H) -gPDY="${GDATE:0:8}" -gcyc="${GDATE:8:2}" - -RDATE=$(date --utc 
-d "${PDY} ${cyc} + ${OFFSET_START_HOUR} hours" +%Y%m%d%H) -DTG_PREFIX="${RDATE:0:8}.${RDATE:8:2}0000" - -MEMDIR_ARRAY=() -if [[ "${RUN:-}" = "gefs" ]]; then - # Populate the member_dirs array based on the value of NMEM_ENS - for ((ii = 0; ii <= "${NMEM_ENS:-0}"; ii++)); do - MEMDIR_ARRAY+=("mem$(printf "%03d" "${ii}")") - done -else - MEMDIR_ARRAY+=("") -fi - -# Initialize return code -err=0 - -error_message() { - echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})" -} - -############################################################### -for MEMDIR in "${MEMDIR_ARRAY[@]}"; do - - # Stage atmosphere initial conditions to ROTDIR - if [[ ${EXP_WARM_START:-".false."} = ".true." ]]; then - # Stage the FV3 restarts to ROTDIR (warm start) - RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL - [[ ! -d "${COM_ATMOS_RESTART_PREV}" ]] && mkdir -p "${COM_ATMOS_RESTART_PREV}" - prev_atmos_copy_list=(fv_core.res.nc coupler.res) - for ftype in "${prev_atmos_copy_list[@]}"; do - src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.${ftype}" - tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}" - ${NCP} "${src}" "${tgt}" - rc=$? - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - done - for ftype in ca_data fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data; do - for ((tt = 1; tt <= ntiles; tt++)); do - src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.${ftype}.tile${tt}.nc" - if (( tt > 6 )) ; then - tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}.nest0$((tt-5)).tile${tt}.nc" - else - tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}.tile${tt}.nc" - fi - ${NCP} "${src}" "${tgt}" - rc=$? - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - done - done - else - # Stage the FV3 cold-start initial conditions to ROTDIR - YMD=${PDY} HH=${cyc} declare_from_tmpl COM_ATMOS_INPUT - [[ ! 
-d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}" - src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/gfs_ctrl.nc" - tgt="${COM_ATMOS_INPUT}/gfs_ctrl.nc" - ${NCP} "${src}" "${tgt}" - rc=$? - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - for ftype in gfs_data sfc_data; do - for ((tt = 1; tt <= ntiles; tt++)); do - src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${ftype}.tile${tt}.nc" - tgt="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc" - ${NCP} "${src}" "${tgt}" - rc=$? - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - done - if (( ntiles > 6 )); then - ${NLN} "${COM_ATMOS_INPUT}/${ftype}.tile7.nc" "${COM_ATMOS_INPUT}/${ftype}.nest02.tile7.nc" - fi - done - fi - - # Atmosphere Perturbation Files (usually used with replay ICS) - # Extra zero on MEMDIR ensure we have a number even if the string is empty - if (( $((10#0${MEMDIR:3})) > 0 )) && [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then - YMD=${PDY} HH=${cyc} declare_from_tmpl COM_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL - [[ ! -d "${COM_ATMOS_ANALYSIS}" ]] && mkdir -p "${COM_ATMOS_ANALYSIS}" - src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.fv3_perturbation.nc" - tgt="${COM_ATMOS_ANALYSIS}/${RUN}.t00z.atminc.nc" - ${NCP} "${src}" "${tgt}" - rc=${?} - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - fi - - # Stage ocean initial conditions to ROTDIR (warm start) - if [[ "${DO_OCN:-}" = "YES" ]]; then - RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL - [[ ! -d "${COM_OCEAN_RESTART_PREV}" ]] && mkdir -p "${COM_OCEAN_RESTART_PREV}" - src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.MOM.res.nc" - tgt="${COM_OCEAN_RESTART_PREV}/${DTG_PREFIX}.MOM.res.nc" - ${NCP} "${src}" "${tgt}" - rc=$? 
- ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - case "${OCNRES}" in - "500" | "100") - # Nothing more to do for these resolutions - ;; - "025" ) - for nn in $(seq 1 3); do - src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.MOM.res_${nn}.nc" - tgt="${COM_OCEAN_RESTART_PREV}/${DTG_PREFIX}.MOM.res_${nn}.nc" - ${NCP} "${src}" "${tgt}" - rc=$? - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - done - ;; - *) - echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}" - rc=1 - err=$((err + rc)) - ;; - esac - - # Ocean Perturbation Files - # Extra zero on MEMDIR ensure we have a number even if the string is empty - if (( $((10#0${MEMDIR:3})) > 0 )) && [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then - YMD=${PDY} HH=${cyc} declare_from_tmpl COM_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL - [[ ! -d "${COM_OCEAN_ANALYSIS}" ]] && mkdir -p "${COM_OCEAN_ANALYSIS}" - src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.mom6_perturbation.nc" - tgt="${COM_OCEAN_ANALYSIS}/mom6_increment.nc" - ${NCP} "${src}" "${tgt}" - rc=${?} - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - fi - - # TODO: Do mediator restarts exists in a ATMW configuration? - # TODO: No mediator is presumably involved in an ATMA configuration - if [[ ${EXP_WARM_START:-".false."} = ".true." ]]; then - # Stage the mediator restarts to ROTDIR (warm start/restart the coupled model) - RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_MED_RESTART_PREV:COM_MED_RESTART_TMPL - [[ ! -d "${COM_MED_RESTART_PREV}" ]] && mkdir -p "${COM_MED_RESTART_PREV}" - src="${BASE_CPLIC}/${CPL_MEDIC:-}/${PDY}${cyc}/${MEMDIR}/med/${DTG_PREFIX}.ufs.cpld.cpl.r.nc" - tgt="${COM_MED_RESTART_PREV}/${DTG_PREFIX}.ufs.cpld.cpl.r.nc" - if [[ -f "${src}" ]]; then - ${NCP} "${src}" "${tgt}" - rc=$? 
- ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - else - echo "WARNING: No mediator restarts available with warm_start=${EXP_WARM_START}" - fi - fi - - fi - - # Stage ice initial conditions to ROTDIR (warm start) - if [[ "${DO_ICE:-}" = "YES" ]]; then - RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL - [[ ! -d "${COM_ICE_RESTART_PREV}" ]] && mkdir -p "${COM_ICE_RESTART_PREV}" - src="${BASE_CPLIC}/${CPL_ICEIC:-}/${PDY}${cyc}/${MEMDIR}/ice/${DTG_PREFIX}.cice_model.res.nc" - tgt="${COM_ICE_RESTART_PREV}/${DTG_PREFIX}.cice_model.res.nc" - ${NCP} "${src}" "${tgt}" - rc=$? - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - fi - - # Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc) - if [[ "${DO_WAVE:-}" = "YES" ]]; then - YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL - [[ ! -d "${COM_WAVE_RESTART_PREV}" ]] && mkdir -p "${COM_WAVE_RESTART_PREV}" - for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust - src="${BASE_CPLIC}/${CPL_WAVIC:-}/${PDY}${cyc}/${MEMDIR}/wave/${DTG_PREFIX}.restart.${grdID}" - tgt="${COM_WAVE_RESTART_PREV}/${DTG_PREFIX}.restart.${grdID}" - ${NCP} "${src}" "${tgt}" - rc=$? - ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" - err=$((err + rc)) - done - fi - -done # for MEMDIR in "${MEMDIR_ARRAY[@]}"; do - -############################################################### -# Check for errors and exit if any of the above failed -if [[ "${err}" -ne 0 ]]; then - echo "FATAL ERROR: Unable to copy ICs from ${BASE_CPLIC} to ${ROTDIR}; ABORT!" 
- exit "${err}" -fi - -############################################################## -# Exit cleanly -exit "${err}" diff --git a/sorc/build_all.sh b/sorc/build_all.sh index b6c4e6cc1c..79ae3c937f 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -131,16 +131,16 @@ build_jobs["ufs"]=8 big_jobs=$((big_jobs+1)) build_opts["ufs"]="${_wave_opt} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug}" -build_jobs["upp"]=2 +build_jobs["upp"]=1 build_opts["upp"]="${_build_debug}" -build_jobs["ufs_utils"]=2 +build_jobs["ufs_utils"]=1 build_opts["ufs_utils"]="${_verbose_opt} ${_build_debug}" build_jobs["gfs_utils"]=1 build_opts["gfs_utils"]="${_verbose_opt} ${_build_debug}" -build_jobs["ww3prepost"]=2 +build_jobs["ww3prepost"]=1 build_opts["ww3prepost"]="${_wave_opt} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug}" # Optional DA builds @@ -154,7 +154,7 @@ if [[ "${_build_ufsda}" == "YES" ]]; then fi fi if [[ "${_build_gsi}" == "YES" ]]; then - build_jobs["gsi_enkf"]=8 + build_jobs["gsi_enkf"]=2 build_opts["gsi_enkf"]="${_verbose_opt} ${_build_debug}" fi if [[ "${_build_gsi}" == "YES" || "${_build_ufsda}" == "YES" ]] ; then diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index ae30e7a645..92404afc01 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -162,20 +162,13 @@ cd "${HOMEgfs}/parm/ufs" || exit 1 ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/noahmptable.tbl" . 
cd "${HOMEgfs}/parm/post" || exit 1 -for file in postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GEFS-WAFS.txt \ - postxconfig-NT-GEFS-F00-aerosol.txt postxconfig-NT-GEFS-aerosol.txt \ - postxconfig-NT-GFS-ANL.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt \ - postxconfig-NT-GFS.txt postxconfig-NT-GFS-FLUX.txt postxconfig-NT-GFS-GOES.txt \ - postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-TWO.txt \ - params_grib2_tbl_new post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat +for file in params_grib2_tbl_new nam_micro_lookup.dat do ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/${file}" . done -for file in optics_luts_DUST.dat optics_luts_DUST_nasa.dat optics_luts_NITR_nasa.dat \ - optics_luts_SALT.dat optics_luts_SALT_nasa.dat optics_luts_SOOT.dat optics_luts_SOOT_nasa.dat \ - optics_luts_SUSO.dat optics_luts_SUSO_nasa.dat optics_luts_WASO.dat optics_luts_WASO_nasa.dat +for dir in gfs gefs do - ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/fix/chem/${file}" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/${dir}" . 
done for file in ice.csv ocean.csv ocnicepost.nml.jinja2 do @@ -202,7 +195,7 @@ done # Link these templates from ufs-weather-model cd "${HOMEgfs}/parm/ufs" || exit 1 -declare -a ufs_templates=("model_configure.IN" "model_configure_nest.IN"\ +declare -a ufs_templates=("model_configure.IN" "input_global_nest.nml.IN"\ "MOM_input_025.IN" "MOM_input_050.IN" "MOM_input_100.IN" "MOM_input_500.IN" \ "MOM6_data_table.IN" \ "ice_in.IN" \ @@ -219,7 +212,13 @@ declare -a ufs_templates=("model_configure.IN" "model_configure_nest.IN"\ "ufs.configure.s2swa.IN" \ "ufs.configure.s2swa_esmf.IN" \ "ufs.configure.leapfrog_atm_wav.IN" \ - "ufs.configure.leapfrog_atm_wav_esmf.IN" ) + "ufs.configure.leapfrog_atm_wav_esmf.IN" \ + "post_itag_gfs" \ + "postxconfig-NT-gfs.txt" \ + "postxconfig-NT-gfs_FH00.txt") + # TODO: The above postxconfig files in the UFSWM are not the same as the ones in UPP + # TODO: GEFS postxconfig files also need to be received from UFSWM + # See forecast_predet.sh where the UPP versions are used. They will need to be replaced with these. for file in "${ufs_templates[@]}"; do [[ -s "${file}" ]] && rm -f "${file}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/${file}" . @@ -238,7 +237,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then cd "${HOMEgfs}/fix" || exit 1 [[ ! -d gdas ]] && mkdir -p gdas cd gdas || exit 1 - for gdas_sub in fv3jedi gsibec obs soca; do + for gdas_sub in fv3jedi gsibec obs soca aero; do if [[ -d "${gdas_sub}" ]]; then rm -rf "${gdas_sub}" fi @@ -329,7 +328,7 @@ ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/ufs_model.x" . [[ -s "upp.x" ]] && rm -f upp.x ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/exec/upp.x" . -for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle; do +for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle fregrid; do [[ -s "${ufs_utilsexe}" ]] && rm -f "${ufs_utilsexe}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/exec/${ufs_utilsexe}" . 
done @@ -368,14 +367,17 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then declare -a JEDI_EXE=("gdas.x" \ "gdas_soca_gridgen.x" \ "gdas_soca_error_covariance_toolbox.x" \ + "gdas_fv3jedi_error_covariance_toolbox.x" \ "gdas_soca_setcorscales.x" \ "gdas_soca_diagb.x" \ "fv3jedi_plot_field.x" \ + "gdasapp_chem_diagb.x" \ "fv3jedi_fv3inc.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ "gdas_socahybridweights.x" \ + "gdasapp_land_ensrecenter.x" \ "bufr2ioda.x" \ "calcfIMS.exe" \ "apply_incr.exe" ) diff --git a/ush/check_ice_netcdf.sh b/ush/check_ice_netcdf.sh index 02ca4dae80..9d2d945a8b 100755 --- a/ush/check_ice_netcdf.sh +++ b/ush/check_ice_netcdf.sh @@ -19,12 +19,12 @@ if (( offset != 0 )); then fhr3=$(printf %03i "${fhri}") if (( fhri <= FHOUT_ICE_GFS )); then (( interval = FHOUT_ICE_GFS - cyc )) - ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model_data/ice/history/gefs.ice.t${cyc}z.${interval}hr_avg.f${fhr3}.nc + ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model/ice/history/gefs.ice.t${cyc}z.${interval}hr_avg.f${fhr3}.nc else - ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model_data/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr3}.nc + ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr3}.nc fi else - ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model_data/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr}.nc + ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr}.nc fi #Check if netcdf file exists. 
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 8af9054972..d13cb0df0c 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -37,7 +37,7 @@ FV3_postdet() { fi # Get list of FV3 restart files - local file_list + local file_list file_list=$(FV3_restarts) echo "Copying FV3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" local fv3_file restart_file @@ -60,6 +60,24 @@ FV3_postdet() { break fi done + # Replace fv_tracer with aeroanl_fv_tracer restart files from current cycle (if found) + local nn + local use_anl_aero="YES" + for (( nn = 1; nn <= ntiles; nn++ )); do + test_tracer_file="${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.aeroanl_fv_tracer.res.tile${nn}.nc" + if [[ ! -f "${test_tracer_file}" ]]; then + use_anl_aero="NO" + echo "WARNING: File ${test_tracer_file} does not exist, will not replace any files from the aerosol analysis" + break + fi + done + if [[ ${use_anl_aero} == "YES" ]]; then + for (( nn = 1; nn <= ntiles; nn++ )); do + rm -f "${DATA}/INPUT/fv_tracer.res.tile${nn}.nc" + ${NCP} "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.aeroanl_fv_tracer.res.tile${nn}.nc" \ + "${DATA}/INPUT/fv_tracer.res.tile${nn}.nc" + done + fi # if [[ ${use_anl_aero} == "YES" ]]; then fi # if [[ "${RERUN}" != "YES" ]]; then fi # if [[ "${warm_start}" == ".true." ]]; then @@ -265,7 +283,7 @@ FV3_out() { ${NCP} "${DATA}/model_configure" "${COMOUT_CONF}/ufs.model_configure" ${NCP} "${DATA}/ufs.configure" "${COMOUT_CONF}/ufs.ufs.configure" ${NCP} "${DATA}/diag_table" "${COMOUT_CONF}/ufs.diag_table" - + # Determine the dates for restart files to be copied to COM local restart_date restart_dates diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index d1a332716a..9e08a12dd8 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -304,6 +304,7 @@ FV3_predet(){ phys_hydrostatic=".false." 
# enable heating in hydrostatic balance in non-hydrostatic simulation use_hydro_pressure=".false." # use hydrostatic pressure for physics make_nh=".true." # running in non-hydrostatic mode + pass_full_omega_to_physics_in_non_hydrostatic_mode=".true." else # hydrostatic options hydrostatic=".true." phys_hydrostatic=".false." # ignored when hydrostatic = T @@ -497,7 +498,7 @@ FV3_predet(){ local month mm for (( month = 1; month <=12; month++ )); do mm=$(printf %02d "${month}") - ${NCP} "${FIXgfs}/aer/merra2.aerclim.2003-2014.m${mm}.nc" "aeroclim.m${mm}.nc" + ${NCP} "${FIXgfs}/aer/merra2.aerclim.2014-2023.m${mm}.nc" "aeroclim.m${mm}.nc" done fi @@ -536,10 +537,16 @@ FV3_predet(){ # Inline UPP fix files if [[ "${WRITE_DOPOST:-}" == ".true." ]]; then - ${NCP} "${PARMgfs}/post/post_tag_gfs${LEVS}" "${DATA}/itag" - ${NCP} "${FLTFILEGFS:-${PARMgfs}/post/postxconfig-NT-GFS-TWO.txt}" "${DATA}/postxconfig-NT.txt" - ${NCP} "${FLTFILEGFSF00:-${PARMgfs}/post/postxconfig-NT-GFS-F00-TWO.txt}" "${DATA}/postxconfig-NT_FH00.txt" - ${NCP} "${POSTGRB2TBL:-${PARMgfs}/post/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new" + ${NCP} "${POSTGRB2TBL:-${PARMgfs}/post/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new" + ${NCP} "${PARMgfs}/ufs/post_itag_gfs" "${DATA}/itag" # TODO: Need a GEFS version when available in the UFS-weather-model + # TODO: These should be replaced with ones from the ufs-weather-model when available there + if [[ "${RUN}" =~ "gdas" || "${RUN}" =~ "gfs" ]]; then # RUN = gdas | enkfgdas | gfs | enkfgfs + ${NCP} "${PARMgfs}/post/gfs/postxconfig-NT-gfs-two.txt" "${DATA}/postxconfig-NT.txt" + ${NCP} "${PARMgfs}/post/gfs/postxconfig-NT-gfs-f00-two.txt" "${DATA}/postxconfig-NT_FH00.txt" + elif [[ "${RUN}" == "gefs" ]]; then # RUN = gefs + ${NCP} "${PARMgfs}/post/gefs/postxconfig-NT-gefs.txt" "${DATA}/postxconfig-NT.txt" + ${NCP} "${PARMgfs}/post/gefs/postxconfig-NT-gefs-f00.txt" "${DATA}/postxconfig-NT_FH00.txt" + fi fi } diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh 
index 8a7d9b1091..0a7a8e8522 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -18,8 +18,17 @@ # 2018-05-30 Guang Ping Lou: Make sure all files are available. # 2019-10-10 Guang Ping Lou: Read in NetCDF files # 2024-03-03 Bo Cui: Add options to use different bufr table for different resolution NetCDF files +# 2024-08-08 Bo Cui: Update to handle one forecast at a time # echo "History: February 2003 - First implementation of this utility script" # +fhr="$1" +fhr_p="$2" +FINT="$3" +F00FLAG="$4" +workdir="$5" + +cd "${workdir}" || exit 2 + source "${USHgfs}/preamble.sh" if [[ "${F00FLAG}" == "YES" ]]; then @@ -37,63 +46,69 @@ else bufrflag=".false." fi -##fformat="nc" -##fformat="nemsio" +# check if read in bufr_ij_gfs_${CASE}.txt + +if [[ -s "${PARMgfs}/product/bufr_ij_gfs_${CASE}.txt" ]]; then + # use predetermined grid point(i,j) in bufr_gfs_${CASE}.txt + ${NLN} "${PARMgfs}/product/bufr_ij_gfs_${CASE}.txt" fort.7 + np1=0 +else + # find the nearest neighbor grid point(i,j) in the code + np1=1 + echo "No bufr_ij_gfs_${CASE}.txt For CASE ${CASE}" + echo "Find the nearest neighbor grid (i,j) in the code" +fi + +##fformat="netcdf" CLASS="class1fv3" cat << EOF > gfsparm &NAMMET levs=${LEVS},makebufr=${bufrflag}, dird="${COM_ATMOS_BUFR}/bufr", - nstart=${FSTART},nend=${FEND},nint=${FINT}, + nstart=${fhr},nend=${fhr},nint=${FINT}, nend1=${NEND1},nint1=${NINT1},nint3=${NINT3}, - nsfc=80,f00=${f00flag},fformat=${fformat},np1=0 + nsfc=80,f00=${f00flag},fformat=${fformat},np1=${np1}, + fnsig="sigf${fhr}", + fngrib="flxf${fhr}", + fngrib2="flxf${fhr_p}" / EOF -sleep_interval=10 -max_tries=1000 -for (( hr = 10#${FSTART}; hr <= 10#${FEND}; hr = hr + 10#${FINT} )); do - hh2=$(printf %02i "${hr}") - hh3=$(printf %03i "${hr}") - - #--------------------------------------------------------- - # Make sure all files are available: - filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${hh3}.${logfm}" - if ! 
wait_for_file "${filename}" "${sleep_interval}" "${max_tries}"; then - echo "FATAL ERROR: COULD NOT LOCATE logf${hh3} file" - exit 2 - fi - - #------------------------------------------------------------------ - ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh3}.${atmfm}" "sigf${hh2}" - ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh3}.${atmfm}" "flxf${hh2}" -done +#--------------------------------------------------------- +# Make sure all files are available: + +filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${fhr}.${logfm}" +if [[ -z $(ls "${filename}") ]]; then + echo "FATAL ERROR: COULD NOT LOCATE logf${fhr} file" + exit 2 +fi + +filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${fhr_p}.${logfm}" +if [[ -z $(ls "${filename}") ]]; then + echo "FATAL ERROR: COULD NOT LOCATE logf${fhr_p} file" + exit 2 +fi + +#------------------------------------------------------------------ +${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${fhr}.${atmfm}" "sigf${fhr}" +${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${fhr}.${atmfm}" "flxf${fhr}" +${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${fhr_p}.${atmfm}" "flxf${fhr_p}" # define input BUFR table file. ${NLN} "${PARMgfs}/product/bufr_gfs_${CLASS}.tbl" fort.1 ${NLN} "${STNLIST:-${PARMgfs}/product/bufr_stalist.meteo.gfs}" fort.8 -case "${CASE}" in - "C768") - ${NLN} "${PARMgfs}/product/bufr_ij13km.txt" fort.7 - ;; - "C1152") - ${NLN} "${PARMgfs}/product/bufr_ij9km.txt" fort.7 - ;; - *) - echo "WARNING: No bufr table for this resolution, using the one for C768" - ${NLN} "${PARMgfs}/product/bufr_ij13km.txt" fort.7 - ;; -esac - -${APRUN_POSTSND} "${EXECgfs}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}" + +#------------------------------------------------------------------ +"${EXECgfs}/${pgm}" < gfsparm > "out_gfs_bufr_${fhr}" + export err=$?
if [[ "${err}" -ne 0 ]]; then echo "GFS postsnd job error, Please check files " - echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh2}.${atmfm}" - echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh2}.${atmfm}" + echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${fhr}.${atmfm}" + echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${fhr}.${atmfm}" err_chk fi diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh index 7e8e065d26..8f102fe298 100755 --- a/ush/parsing_model_configure_FV3.sh +++ b/ush/parsing_model_configure_FV3.sh @@ -38,9 +38,9 @@ local NUM_FILES=${NUM_FILES:-2} local FILENAME_BASE="'atm' 'sfc'" # OUTPUT_GRID local OUTPUT_FILE="'${OUTPUT_FILETYPE_ATM}' '${OUTPUT_FILETYPE_SFC}'" -local ZSTANDARD_LEVEL=0 -local IDEFLATE=0 # netCDF zlib lossless compression (0-9); 0: no compression -local QUANTIZE_NSD=${QUANTIZE_NSD:-0} # netCDF compression +local ZSTANDARD_LEVEL=${zstandard_level:-0} +local IDEFLATE=${ideflate:-0} # netCDF zlib lossless compression (0-9); 0: no compression +local QUANTIZE_NSD=${quantize_nsd:-0} # netCDF compression local ICHUNK2D=$((4*restile)) local JCHUNK2D=$((2*restile)) local ICHUNK3D=$((4*restile)) @@ -55,7 +55,7 @@ local IAU_OFFSET=${IAU_OFFSET:-0} if [[ "${DO_NEST:-NO}" == "YES" ]] ; then local NEST_IMO=${npx_nest} local NEST_JMO=${npy_nest} - template="${PARMgfs}/ufs/model_configure_nest.IN" + template="${PARMgfs}/ufs/input_global_nest.nml.IN" else template="${PARMgfs}/ufs/model_configure.IN" fi diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 60f44a721a..617ecff719 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -134,6 +134,7 @@ cat > input.nml < "${nml_file}" < None: + """Constructor for JEDI objects + + This method will construct a Jedi object. 
+ This includes: + - save a copy of task_config for provenance + - set the default JEDI YAML and executable names + - set an empty AttrDict for the JEDI config + - set the default directory for J2-YAML templates + + Parameters + ---------- + task_config: AttrDict + Attribute-dictionary of all configuration variables associated with a GDAS task. + yaml_name: str, optional + Name of YAML file for JEDI configuration + + Returns + ---------- + None + """ + + # For provenance, save incoming task_config as a private attribute of JEDI object + self._task_config = task_config + + _exe_name = os.path.basename(task_config.JEDIEXE) + + self.exe = os.path.join(task_config.DATA, _exe_name) + if yaml_name: + self.yaml = os.path.join(task_config.DATA, yaml_name + '.yaml') + else: + self.yaml = os.path.join(task_config.DATA, os.path.splitext(_exe_name)[0] + '.yaml') + self.config = AttrDict() + self.j2tmpl_dir = os.path.join(task_config.PARMgfs, 'gdas') + + @logit(logger) + def set_config(self, task_config: AttrDict, algorithm: Optional[str] = None) -> AttrDict: + """Compile a JEDI configuration dictionary from a template file and save to a YAML file + + Parameters + ---------- + task_config : AttrDict + Dictionary of all configuration variables associated with a GDAS task. + algorithm (optional) : str + Name of the algorithm used to generate the JEDI configuration dictionary. + It will override the algorithm set in the task_config.JCB_<>_YAML file. + + Returns + ---------- + None + """ + + if 'JCB_BASE_YAML' in task_config.keys(): + # Step 1: Fill templates of the JCB base YAML file + jcb_config = parse_j2yaml(task_config.JCB_BASE_YAML, task_config) + + # Step 2: If algorithm is present then override the algorithm in the JEDI + # config. Otherwise, if the algorithm J2-YAML is present, fill + # its templates and merge. 
+ if algorithm: + jcb_config['algorithm'] = algorithm + elif 'JCB_ALGO' in task_config.keys(): + jcb_config['algorithm'] = task_config.JCB_ALGO + elif 'JCB_ALGO_YAML' in task_config.keys(): + jcb_algo_config = parse_j2yaml(task_config.JCB_ALGO_YAML, task_config) + jcb_config.update(jcb_algo_config) + + # Step 3: Generate the JEDI YAML using JCB + self.config = render(jcb_config) + elif 'JEDIYAML' in task_config.keys(): + # Generate JEDI YAML without using JCB + self.config = parse_j2yaml(task_config.JEDIYAML, task_config, + searchpath=self.j2tmpl_dir) + else: + logger.exception(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") + raise KeyError(f"FATAL ERROR: Task config must contain JCB_BASE_YAML or JEDIYAML") + + @logit(logger) + def execute(self, task_config: AttrDict, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: + """Execute JEDI application + + Parameters + ---------- + task_config: AttrDict + Attribute-dictionary of all configuration variables associated with a GDAS task. + aprun_cmd: str + String comprising the run command for the JEDI executable. + jedi_args (optional): List + List of strings comprising optional input arguments for the JEDI executable. + + Returns + ---------- + jedi_config: AttrDict + Attribute-dictionary of JEDI configuration rendered from a template. 
+ """ + + chdir(task_config.DATA) + + exec_cmd = Executable(aprun_cmd) + exec_cmd.add_default_arg(self.exe) + if jedi_args: + for arg in jedi_args: + exec_cmd.add_default_arg(arg) + exec_cmd.add_default_arg(self.yaml) + + try: + exec_cmd() + except OSError: + raise OSError(f"FATAL ERROR: Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"FATAL ERROR: An error occurred during execution of {exec_cmd}") + + @staticmethod + @logit(logger) + def link_exe(task_config: AttrDict) -> None: + """Link JEDI executable to run directory + + Parameters + ---------- + task_config: AttrDict + Attribute-dictionary of all configuration variables associated with a GDAS task. + + Returns + ---------- + None + """ + + # TODO: linking is not permitted per EE2. + # Needs work in JEDI to be able to copy the exec. [NOAA-EMC/GDASApp#1254] + logger.warn("Linking is not permitted per EE2.") + exe_dest = os.path.join(task_config.DATA, os.path.basename(task_config.JEDIEXE)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(task_config.JEDIEXE, exe_dest) + + @logit(logger) + def get_obs_dict(self, task_config: AttrDict) -> Dict[str, Any]: + """Compile a dictionary of observation files to copy + + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of + observation files that are to be copied to the run directory + from the observation input directory + + Parameters + ---------- + task_config: AttrDict + Attribute-dictionary of all configuration variables associated with a GDAS task. 
+ + Returns + ---------- + obs_dict: Dict + a dictionary containing the list of observation files to copy for FileHandler + """ + + observations = find_value_in_nested_dict(self.config, 'observations') + + copylist = [] + for ob in observations['observers']: + obfile = ob['obs space']['obsdatain']['engine']['obsfile'] + basename = os.path.basename(obfile) + copylist.append([os.path.join(task_config.COM_OBS, basename), obfile]) + obs_dict = { + 'mkdir': [os.path.join(task_config.DATA, 'obs')], + 'copy': copylist + } + return obs_dict + + @logit(logger) + def get_bias_dict(self, task_config: AttrDict) -> Dict[str, Any]: + """Compile a dictionary of observation files to copy + + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of + observation bias correction files that are to be copied to the run directory + from the component directory. + TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in + `analysis.py` and should be implemented in the component where this is applicable. + + Parameters + ---------- + task_config: AttrDict + Attribute-dictionary of all configuration variables associated with a GDAS task. + + Returns + ---------- + bias_dict: Dict + a dictionary containing the list of observation bias files to copy for FileHandler + """ + + observations = find_value_in_nested_dict(self.config, 'observations') + + copylist = [] + for ob in observations['observers']: + if 'obs bias' in ob.keys(): + obfile = ob['obs bias']['input file'] + obdir = os.path.dirname(obfile) + basename = os.path.basename(obfile) + prefix = '.'.join(basename.split('.')[:-2]) + for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']: + bfile = f"{prefix}.{file}" + copylist.append([os.path.join(task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) + # TODO: Why is this specific to ATMOS? 
+ + bias_dict = { + 'mkdir': [os.path.join(task_config.DATA, 'bc')], + 'copy': copylist + } + return bias_dict + + +@logit(logger) +def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: + """ + Recursively search through a nested dictionary and return the value for the target key. + This returns the first target key it finds. So if a key exists in a subsequent + nested dictionary, it will not be found. + + Parameters + ---------- + nested_dict : Dict + Dictionary to search + target_key : str + Key to search for + + Returns + ------- + Any + Value of the target key + + Raises + ------ + KeyError + If key is not found in dictionary + + TODO: if this gives issues due to landing on an incorrect key in the nested + dictionary, we will have to implement a more concrete method to search for a key + given a more complete address. See resolved conversations in PR 2387 + + # Example usage: + nested_dict = { + 'a': { + 'b': { + 'c': 1, + 'd': { + 'e': 2, + 'f': 3 + } + }, + 'g': 4 + }, + 'h': { + 'i': 5 + }, + 'j': { + 'k': 6 + } + } + + user_key = input("Enter the key to search for: ") + result = find_value_in_nested_dict(nested_dict, user_key) + """ + + if not isinstance(nested_dict, dict): + raise TypeError(f"Input is not of type(dict)") + + result = nested_dict.get(target_key) + if result is not None: + return result + + for value in nested_dict.values(): + if isinstance(value, dict): + try: + result = find_value_in_nested_dict(value, target_key) + if result is not None: + return result + except KeyError: + pass + + raise KeyError(f"Key '{target_key}' not found in the nested dictionary") diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index ccc5fb601a..0389e109a1 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -82,29 +82,18 @@ def initialize(self: Analysis) -> None: jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) 
FileHandler(jedi_fix_list).sync() - # stage berror files - # copy BUMP files, otherwise it will assume ID matrix - if self.task_config.get('STATICB_TYPE', 'identity') in ['bump']: - FileHandler(self.get_berror_dict(self.task_config)).sync() - - # stage backgrounds - FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync() + # stage files from COM and create working directories + logger.info(f"Staging files prescribed from {self.task_config.AERO_STAGE_VARIATIONAL_TMPL}") + aero_var_stage_list = parse_j2yaml(self.task_config.AERO_STAGE_VARIATIONAL_TMPL, self.task_config) + FileHandler(aero_var_stage_list).sync() # generate variational YAML file logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") - # need output dir for diags and anl - logger.debug("Create empty output [anl, diags] directories to receive output from executable") - newdirs = [ - os.path.join(self.task_config['DATA'], 'anl'), - os.path.join(self.task_config['DATA'], 'diags'), - ] - FileHandler({'mkdir': newdirs}).sync() - @logit(logger) - def execute(self: Analysis) -> None: + def variational(self: Analysis) -> None: chdir(self.task_config.DATA) @@ -140,59 +129,33 @@ def finalize(self: Analysis) -> None: """ # ---- tar up diags # path of output tar statfile - aerostat = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") + logger.info('Preparing observation space diagnostics for archiving') + aerostat = os.path.join(self.task_config.COMOUT_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") # get list of diag files to put in tarball diags = glob.glob(os.path.join(self.task_config['DATA'], 'diags', 'diag*nc4')) # gzip the files first for diagfile in diags: + logger.info(f'Adding {diagfile} to tar file') with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 
'wb') as f_out: f_out.writelines(f_in) + # ---- add increments to RESTART files + logger.info('Adding increments to RESTART files') + self._add_fms_cube_sphere_increments() + + # copy files back to COM + logger.info(f"Copying files to COM based on {self.task_config.AERO_FINALIZE_VARIATIONAL_TMPL}") + aero_var_final_list = parse_j2yaml(self.task_config.AERO_FINALIZE_VARIATIONAL_TMPL, self.task_config) + FileHandler(aero_var_final_list).sync() + # open tar file for writing with tarfile.open(aerostat, "w") as archive: for diagfile in diags: diaggzip = f"{diagfile}.gz" archive.add(diaggzip, arcname=os.path.basename(diaggzip)) - - # copy full YAML from executable to ROTDIR - src = os.path.join(self.task_config['DATA'], f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") - dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") - yaml_copy = { - 'mkdir': [self.task_config.COM_CHEM_ANALYSIS], - 'copy': [[src, dest]] - } - FileHandler(yaml_copy).sync() - - # ---- copy RESTART fv_tracer files for future reference - if self.task_config.DOIAU: - bkgtime = self.task_config.AERO_WINDOW_BEGIN - else: - bkgtime = self.task_config.current_cycle - template = '{}.fv_tracer.res.tile{}.nc'.format(to_fv3time(bkgtime), '{tilenum}') - bkglist = [] - for itile in range(1, self.task_config.ntiles + 1): - tracer = template.format(tilenum=itile) - src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, tracer) - dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f'aeroges.{tracer}') - bkglist.append([src, dest]) - FileHandler({'copy': bkglist}).sync() - - # ---- add increments to RESTART files - logger.info('Adding increments to RESTART files') - self._add_fms_cube_sphere_increments() - - # ---- move increments to ROTDIR - logger.info('Moving increments to ROTDIR') - template = f'aeroinc.{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' - inclist = [] - for 
itile in range(1, self.task_config.ntiles + 1): - tracer = template.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'anl', tracer) - dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, tracer) - inclist.append([src, dest]) - FileHandler({'copy': inclist}).sync() + logger.info(f'Saved diags to {aerostat}') def clean(self): super().clean() @@ -209,7 +172,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None: restart_template = f'{to_fv3time(bkgtime)}.fv_tracer.res.tile{{tilenum}}.nc' increment_template = f'{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' + increment_template) - bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template) + bkg_template = os.path.join(self.task_config.DATA, 'anl', restart_template) # get list of increment vars incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] @@ -232,38 +195,7 @@ def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: bkg_dict: Dict a dictionary containing the list of model background files to copy for FileHandler """ - # NOTE for now this is FV3 RESTART files and just assumed to be fh006 - - # get FV3 RESTART files, this will be a lot simpler when using history files - rst_dir = task_config.COM_ATMOS_RESTART_PREV - run_dir = os.path.join(task_config['DATA'], 'bkg') - - # Start accumulating list of background files to copy - bkglist = [] - - # if using IAU, we can use FGAT - bkgtimes = [] - begintime = task_config.previous_cycle - for fcsthr in task_config.aero_bkg_fhr: - bkgtimes.append(add_to_datetime(begintime, to_timedelta(f"{fcsthr}H"))) - - # now loop over background times - for bkgtime in bkgtimes: - # aerosol DA needs coupler - basename = f'{to_fv3time(bkgtime)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), 
os.path.join(run_dir, basename)]) - - # aerosol DA only needs core/tracer - for ftype in ['core', 'tracer']: - template = f'{to_fv3time(bkgtime)}.fv_{ftype}.res.tile{{tilenum}}.nc' - for itile in range(1, task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - bkg_dict = { - 'mkdir': [run_dir], - 'copy': bkglist, - } + bkg_dict = {} return bkg_dict @logit(logger) @@ -285,34 +217,5 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: berror_dict: Dict a dictionary containing the list of background error files to copy for FileHandler """ - # aerosol static-B needs nicas, cor_rh, cor_rv and stddev files. - b_dir = config.BERROR_DATA_DIR - b_datestr = to_fv3time(config.BERROR_DATE) - berror_list = [] - - for ftype in ['stddev']: - coupler = f'{b_datestr}.{ftype}.coupler.res' - berror_list.append([ - os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) - ]) - template = f'{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - tracer = template.format(tilenum=itile) - berror_list.append([ - os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) - ]) - radius = 'cor_aero_universe_radius' - berror_list.append([ - os.path.join(b_dir, radius), os.path.join(config.DATA, 'berror', radius) - ]) - nproc = config.ntiles * config.layout_x * config.layout_y - for nn in range(1, nproc + 1): - berror_list.append([ - os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), - os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') - ]) - berror_dict = { - 'mkdir': [os.path.join(config.DATA, 'berror')], - 'copy': berror_list, - } + berror_dict = {} return berror_dict diff --git a/ush/python/pygfs/task/aero_bmatrix.py b/ush/python/pygfs/task/aero_bmatrix.py new file mode 100644 index 0000000000..c652bad558 --- /dev/null +++ 
b/ush/python/pygfs/task/aero_bmatrix.py @@ -0,0 +1,294 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import List, Dict, Any, Union + +from wxflow import (AttrDict, FileHandler, rm_p, + add_to_datetime, to_fv3time, to_timedelta, + to_fv3time, chdir, Executable, WorkflowException, + parse_j2yaml, save_as_yaml, logit) +from pygfs.task.bmatrix import BMatrix + +logger = getLogger(__name__.split('.')[-1]) + + +class AerosolBMatrix(BMatrix): + """ + Class for global aerosol BMatrix tasks + """ + @logit(logger, name="AerosolBMatrix") + def __init__(self, config: Dict[str, Any]) -> None: + super().__init__(config) + + _res = int(self.task_config['CASE'][1:]) + _res_anl = int(self.task_config['CASE_ANL'][1:]) + + _bmat_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_diagb.yaml") + _diffusion_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_diffusion.yaml") + _convertstate_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.chem_convertstate.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.task_config['LEVS'] - 1, + 'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')), + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'bmat_yaml': _bmat_yaml, + 'diffusion_yaml': _diffusion_yaml, + 'convertstate_yaml': _convertstate_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.task_config, **local_dict) 
+ + @logit(logger) + def initialize(self: BMatrix) -> None: + super().initialize() + # stage fix files + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage backgrounds + logger.info(f"Staging backgrounds prescribed from {self.task_config.AERO_BMATRIX_STAGE_TMPL}") + aero_bmat_stage_list = parse_j2yaml(self.task_config.AERO_BMATRIX_STAGE_TMPL, self.task_config) + FileHandler(aero_bmat_stage_list).sync() + + # generate convert state YAML file + logger.info(f"Generate convert state YAML file: {self.task_config.convertstate_yaml}") + self.task_config.convertstate_config = parse_j2yaml(self.task_config.INTERPYAML, + self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + save_as_yaml(self.task_config.convertstate_config, self.task_config.convertstate_yaml) + logger.info(f"Wrote convert state YAML to: {self.task_config.convertstate_yaml}") + + # generate diagb YAML file + logger.info(f"Generate bmat YAML file: {self.task_config.bmat_yaml}") + self.task_config.bmat_config = parse_j2yaml(self.task_config.BMATYAML, + self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + save_as_yaml(self.task_config.bmat_config, self.task_config.bmat_yaml) + logger.info(f"Wrote bmat YAML to: {self.task_config.bmat_yaml}") + + # generate diffusion parameters YAML file + logger.info(f"Generate diffusion YAML file: {self.task_config.diffusion_yaml}") + self.task_config.diffusion_config = parse_j2yaml(self.task_config.DIFFUSIONYAML, + self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + save_as_yaml(self.task_config.diffusion_config, self.task_config.diffusion_yaml) + logger.info(f"Wrote diffusion YAML to: {self.task_config.diffusion_yaml}") + + # link executable to run directory + self.link_bmatexe() + self.link_diffusion_exe() + self.link_jediexe() + + @logit(logger) + def interpBackground(self) -> None: + 
chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_AEROANLGENB) + exec_name = os.path.join(self.task_config.DATA, 'gdas.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('convertstate') + exec_cmd.add_default_arg(self.task_config.convertstate_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def computeVariance(self) -> None: + + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_AEROANLGENB) + exec_name = os.path.join(self.task_config.DATA, 'gdasapp_chem_diagb.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.bmat_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def computeDiffusion(self) -> None: + + chdir(self.task_config.DATA) + + exec_cmd_diffusion = Executable(self.task_config.APRUN_AEROANLGENB) + exec_name_diffusion = os.path.join(self.task_config.DATA, 'gdas_fv3jedi_error_covariance_toolbox.x') + exec_cmd_diffusion.add_default_arg(exec_name_diffusion) + exec_cmd_diffusion.add_default_arg(self.task_config.diffusion_yaml) + + try: + logger.debug(f"Executing {exec_cmd_diffusion}") + exec_cmd_diffusion() + except OSError: + raise OSError(f"Failed to execute {exec_cmd_diffusion}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd_diffusion}") + + pass + + @logit(logger) + def finalize(self) -> None: + super().finalize() + # save files to COMOUT + logger.info(f"Saving files to COMOUT based on {self.task_config.AERO_BMATRIX_FINALIZE_TMPL}") + 
aero_bmat_finalize_list = parse_j2yaml(self.task_config.AERO_BMATRIX_FINALIZE_TMPL, self.task_config) + FileHandler(aero_bmat_finalize_list).sync() + + @logit(logger) + def link_jediexe(self) -> None: + """ + + This method links a JEDI executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + exe_src = self.task_config.JEDIEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. + logger.info(f"Link executable {exe_src} to DATA/") + logger.warn("Linking is not permitted per EE2.") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return exe_dest + + @logit(logger) + def link_bmatexe(self) -> None: + """ + + This method links a JEDI executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + exe_src = self.task_config.BMATEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. + logger.info(f"Link executable {exe_src} to DATA/") + logger.warn("Linking is not permitted per EE2.") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return + + @logit(logger) + def link_diffusion_exe(self) -> None: + """ + + This method links a JEDI (fv3jedi_error_covariance_toolbox.x) + executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + + exe_src_diffusion = self.task_config.DIFFUSIONEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. 
+ logger.info(f"Link executable {exe_src_diffusion} to DATA/") + logger.warn("Linking is not permitted per EE2.") + exe_dest_diffusion = os.path.join(self.task_config.DATA, os.path.basename(exe_src_diffusion)) + if os.path.exists(exe_dest_diffusion): + rm_p(exe_dest_diffusion) + os.symlink(exe_src_diffusion, exe_dest_diffusion) + + return + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, core, tracer) + that are needed for global aerosol DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + + # get FV3 RESTART files, this will be a lot simpler when using history files + rst_dir = task_config.COM_ATMOS_RESTART_PREV + run_dir = os.path.join(task_config['DATA'], 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # if using IAU, we can use FGAT + bkgtimes = [] + begintime = task_config.previous_cycle + for fcsthr in task_config.aero_bkg_fhr: + bkgtimes.append(add_to_datetime(begintime, to_timedelta(f"{fcsthr}H"))) + + # now loop over background times + for bkgtime in bkgtimes: + # aerosol DA needs coupler + basename = f'{to_fv3time(bkgtime)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # aerosol DA only needs core/tracer + for ftype in ['core', 'tracer']: + template = f'{to_fv3time(bkgtime)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), 
os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist, + } + return bkg_dict diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index e407cf1765..6f7d3dfc68 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -28,6 +28,8 @@ def __init__(self, config: Dict[str, Any]) -> None: super().__init__(config) # Store location of GDASApp jinja2 templates self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas') + # fix ocnres + self.task_config.OCNRES = f"{self.task_config.OCNRES :03d}" def initialize(self) -> None: super().initialize() @@ -196,7 +198,7 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li @logit(logger) def link_jediexe(self) -> None: - """Compile a dictionary of background error files to copy + """ This method links a JEDI executable to the run directory diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index 953a856192..14cd015601 100644 --- a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -63,9 +63,6 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str if not os.path.isdir(arch_dict.ROTDIR): raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!") - if arch_dict.RUN == "gefs": - raise NotImplementedError("FATAL ERROR: Archiving is not yet set up for GEFS runs") - if arch_dict.RUN in ["gdas", "gfs"]: # Copy the cyclone track files and rename the experiments @@ -75,7 +72,7 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str archive_parm = os.path.join(arch_dict.PARMgfs, "archive") # Collect the dataset to archive locally - arcdir_j2yaml = os.path.join(archive_parm, "arcdir.yaml.j2") + arcdir_j2yaml = os.path.join(archive_parm, f"{arch_dict.NET}_arcdir.yaml.j2") # Add the glob.glob function for capturing log filenames # TODO remove this kludge once log 
filenames are explicit @@ -117,6 +114,9 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str self.tar_cmd = "" return arcdir_set, [] + if arch_dict.NET == "gefs": + raise NotImplementedError("GEFS archiving is not yet implemented!") + master_yaml = "master_" + arch_dict.RUN + ".yaml.j2" parsed_sets = parse_j2yaml(os.path.join(archive_parm, master_yaml), diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 4e9d37335c..8d340a5b73 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -5,33 +5,49 @@ import gzip import tarfile from logging import getLogger -from typing import Dict, List, Any +from pprint import pformat +from typing import Optional, Dict, Any from wxflow import (AttrDict, FileHandler, add_to_datetime, to_fv3time, to_timedelta, to_YMDH, - chdir, + Task, parse_j2yaml, save_as_yaml, - logit, - Executable, - WorkflowException) -from pygfs.task.analysis import Analysis + logit) +from pygfs.jedi import Jedi logger = getLogger(__name__.split('.')[-1]) -class AtmAnalysis(Analysis): +class AtmAnalysis(Task): """ - Class for global atm analysis tasks + Class for JEDI-based global atm analysis tasks """ @logit(logger, name="AtmAnalysis") - def __init__(self, config): + def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): + """Constructor global atm analysis task + + This method will construct a global atm analysis task. 
+ This includes: + - extending the task_config attribute AttrDict to include parameters required for this task + - instantiate the Jedi attribute object + + Parameters + ---------- + config: Dict + dictionary object containing task configuration + yaml_name: str, optional + name of YAML file for JEDI configuration + + Returns + ---------- + None + """ super().__init__(config) _res = int(self.task_config.CASE[1:]) _res_anl = int(self.task_config.CASE_ANL[1:]) _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) - _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -48,7 +64,6 @@ def __init__(self, config): 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", - 'jedi_yaml': _jedi_yaml, 'atm_obsdatain_path': f"{self.task_config.DATA}/obs/", 'atm_obsdataout_path': f"{self.task_config.DATA}/diags/", 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications @@ -58,30 +73,87 @@ def __init__(self, config): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) + # Create JEDI object + self.jedi = Jedi(self.task_config, yaml_name) + @logit(logger) - def initialize(self: Analysis) -> None: + def initialize_jedi(self): + """Initialize JEDI application + + This method will initialize a JEDI application used in the global atm analysis. 
+ This includes: + - generating and saving JEDI YAML config + - linking the JEDI executable + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + # get JEDI-to-FV3 increment converter config and save to YAML file + logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") + self.jedi.set_config(self.task_config) + logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") + + # save JEDI config to YAML file + logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") + save_as_yaml(self.jedi.config, self.jedi.yaml) + + # link JEDI executable + logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") + self.jedi.link_exe(self.task_config) + + @logit(logger) + def initialize_analysis(self) -> None: """Initialize a global atm analysis - This method will initialize a global atm analysis using JEDI. + This method will initialize a global atm analysis. This includes: + - staging observation files + - staging bias correction files - staging CRTM fix files - staging FV3-JEDI fix files - staging B error files - staging model backgrounds - - generating a YAML file for the JEDI executable - creating output directories + + Parameters + ---------- + None + + Returns + ---------- + None """ super().initialize() + # stage observations + logger.info(f"Staging list of observation files generated from JEDI config") + obs_dict = self.jedi.get_obs_dict(self.task_config) + FileHandler(obs_dict).sync() + logger.debug(f"Observation files:\n{pformat(obs_dict)}") + + # stage bias corrections + logger.info(f"Staging list of bias correction files generated from JEDI config") + bias_dict = self.jedi.get_bias_dict(self.task_config) + FileHandler(bias_dict).sync() + logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") + # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") - crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) - 
FileHandler(crtm_fix_list).sync() + crtm_fix_dict = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) + FileHandler(crtm_fix_dict).sync() + logger.debug(f"CRTM fix files:\n{pformat(crtm_fix_dict)}") # stage fix files logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") - jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) - FileHandler(jedi_fix_list).sync() + jedi_fix_dict = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_dict).sync() + logger.debug(f"JEDI fix files:\n{pformat(jedi_fix_dict)}") # stage static background error files, otherwise it will assume ID matrix logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") @@ -90,22 +162,20 @@ def initialize(self: Analysis) -> None: else: berror_staging_dict = {} FileHandler(berror_staging_dict).sync() + logger.debug(f"Background error files:\n{pformat(berror_staging_dict)}") # stage ensemble files for use in hybrid background error if self.task_config.DOHYBVAR: logger.debug(f"Stage ensemble files for DOHYBVAR {self.task_config.DOHYBVAR}") fv3ens_staging_dict = parse_j2yaml(self.task_config.FV3ENS_STAGING_YAML, self.task_config) FileHandler(fv3ens_staging_dict).sync() + logger.debug(f"Ensemble files:\n{pformat(fv3ens_staging_dict)}") # stage backgrounds logger.info(f"Staging background files from {self.task_config.VAR_BKG_STAGING_YAML}") bkg_staging_dict = parse_j2yaml(self.task_config.VAR_BKG_STAGING_YAML, self.task_config) FileHandler(bkg_staging_dict).sync() - - # generate variational YAML file - logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") - save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") + logger.debug(f"Background files:\n{pformat(bkg_staging_dict)}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output 
from executable") @@ -116,54 +186,32 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def variational(self: Analysis) -> None: - - chdir(self.task_config.DATA) - - exec_cmd = Executable(self.task_config.APRUN_ATMANLVAR) - exec_name = os.path.join(self.task_config.DATA, 'gdas.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('fv3jedi') - exec_cmd.add_default_arg('variational') - exec_cmd.add_default_arg(self.task_config.jedi_yaml) + def execute(self, aprun_cmd: str, jedi_args: Optional[str] = None) -> None: + """Run JEDI executable - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") + This method will run JEDI executables for the global atm analysis - pass + Parameters + ---------- + aprun_cmd : str + Run command for JEDI application on HPC system + jedi_args : List + List of additional optional arguments for JEDI application - @logit(logger) - def init_fv3_increment(self: Analysis) -> None: - # Setup JEDI YAML file - self.task_config.jedi_yaml = os.path.join(self.task_config.DATA, - f"{self.task_config.JCB_ALGO}.yaml") - save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) + Returns + ---------- + None + """ - # Link JEDI executable to run directory - self.task_config.jedi_exe = self.link_jediexe() + if jedi_args: + logger.info(f"Executing {self.jedi.exe} {' '.join(jedi_args)} {self.jedi.yaml}") + else: + logger.info(f"Executing {self.jedi.exe} {self.jedi.yaml}") - @logit(logger) - def fv3_increment(self: Analysis) -> None: - # Run executable - exec_cmd = Executable(self.task_config.APRUN_ATMANLFV3INC) - exec_cmd.add_default_arg(self.task_config.jedi_exe) - exec_cmd.add_default_arg(self.task_config.jedi_yaml) - - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - 
raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") + self.jedi.execute(self.task_config, aprun_cmd, jedi_args) @logit(logger) - def finalize(self: Analysis) -> None: + def finalize(self) -> None: """Finalize a global atm analysis This method will finalize a global atm analysis using JEDI. @@ -171,9 +219,16 @@ def finalize(self: Analysis) -> None: - tar output diag files and place in ROTDIR - copy the generated YAML file from initialize to the ROTDIR - copy the updated bias correction files to ROTDIR - - write UFS model readable atm incrment file + Parameters + ---------- + None + + Returns + ---------- + None """ + # ---- tar up diags # path of output tar statfile atmstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.APREFIX}atmstat") @@ -196,16 +251,19 @@ def finalize(self: Analysis) -> None: diaggzip = f"{diagfile}.gz" archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + # get list of yamls to copy to ROTDIR + yamls = glob.glob(os.path.join(self.task_config.DATA, '*atm*yaml')) + # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") - src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") - logger.debug(f"Copying {src} to {dest}") - yaml_copy = { - 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS], - 'copy': [[src, dest]] - } - FileHandler(yaml_copy).sync() + for src in yamls: + yaml_base = os.path.splitext(os.path.basename(src))[0] + dest_yaml_name = f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.{yaml_base}.yaml" + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, dest_yaml_name) + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'copy': [[src, dest]] + } 
+ FileHandler(yaml_copy).sync() # copy bias correction files to ROTDIR logger.info("Copy bias correction files from DATA/ to COM/") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index bd5112050e..55e72702b1 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -5,33 +5,52 @@ import gzip import tarfile from logging import getLogger -from typing import Dict, List +from pprint import pformat +from typing import Optional, Dict, Any from wxflow import (AttrDict, FileHandler, add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD, chdir, + Task, parse_j2yaml, save_as_yaml, logit, Executable, WorkflowException, Template, TemplateConstants) -from pygfs.task.analysis import Analysis +from pygfs.jedi import Jedi logger = getLogger(__name__.split('.')[-1]) -class AtmEnsAnalysis(Analysis): +class AtmEnsAnalysis(Task): """ - Class for global atmens analysis tasks + Class for JEDI-based global atmens analysis tasks """ @logit(logger, name="AtmEnsAnalysis") - def __init__(self, config): + def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): + """Constructor global atmens analysis task + + This method will construct a global atmens analysis task. 
+ This includes: + - extending the task_config attribute AttrDict to include parameters required for this task + - instantiate the Jedi attribute object + + Parameters + ---------- + config: Dict + dictionary object containing task configuration + yaml_name: str, optional + name of YAML file for JEDI configuration + + Returns + ---------- + None + """ super().__init__(config) _res = int(self.task_config.CASE_ENS[1:]) _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) - _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -45,7 +64,6 @@ def __init__(self, config): 'OPREFIX': f"{self.task_config.EUPD_CYC}.t{self.task_config.cyc:02d}z.", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", - 'jedi_yaml': _jedi_yaml, 'atm_obsdatain_path': f"./obs/", 'atm_obsdataout_path': f"./diags/", 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications @@ -55,21 +73,56 @@ def __init__(self, config): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) + # Create JEDI object + self.jedi = Jedi(self.task_config, yaml_name) + + @logit(logger) + def initialize_jedi(self): + """Initialize JEDI application + + This method will initialize a JEDI application used in the global atmens analysis. 
+ This includes: + - generating and saving JEDI YAML config + - linking the JEDI executable + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + # get JEDI config and save to YAML file + logger.info(f"Generating JEDI config: {self.jedi.yaml}") + self.jedi.set_config(self.task_config) + logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") + + # save JEDI config to YAML file + logger.info(f"Writing JEDI config to YAML file: {self.jedi.yaml}") + save_as_yaml(self.jedi.config, self.jedi.yaml) + + # link JEDI-to-FV3 increment converter executable + logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") + self.jedi.link_exe(self.task_config) + @logit(logger) - def initialize(self: Analysis) -> None: + def initialize_analysis(self) -> None: """Initialize a global atmens analysis - This method will initialize a global atmens analysis using JEDI. + This method will initialize a global atmens analysis. This includes: + - staging observation files + - staging bias correction files - staging CRTM fix files - staging FV3-JEDI fix files - staging model backgrounds - - generating a YAML file for the JEDI executable - creating output directories Parameters ---------- - Analysis: parent class for GDAS task + None Returns ---------- @@ -77,25 +130,35 @@ def initialize(self: Analysis) -> None: """ super().initialize() + # stage observations + logger.info(f"Staging list of observation files generated from JEDI config") + obs_dict = self.jedi.get_obs_dict(self.task_config) + FileHandler(obs_dict).sync() + logger.debug(f"Observation files:\n{pformat(obs_dict)}") + + # stage bias corrections + logger.info(f"Staging list of bias correction files generated from JEDI config") + bias_dict = self.jedi.get_bias_dict(self.task_config) + FileHandler(bias_dict).sync() + logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") + # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") - 
crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) - FileHandler(crtm_fix_list).sync() + crtm_fix_dict = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) + FileHandler(crtm_fix_dict).sync() + logger.debug(f"CRTM fix files:\n{pformat(crtm_fix_dict)}") # stage fix files logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") - jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) - FileHandler(jedi_fix_list).sync() + jedi_fix_dict = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_dict).sync() + logger.debug(f"JEDI fix files:\n{pformat(jedi_fix_dict)}") # stage backgrounds logger.info(f"Stage ensemble member background files") bkg_staging_dict = parse_j2yaml(self.task_config.LGETKF_BKG_STAGING_YAML, self.task_config) FileHandler(bkg_staging_dict).sync() - - # generate ensemble da YAML file - logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}") - save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) - logger.info(f"Wrote ensemble da YAML to: {self.task_config.jedi_yaml}") + logger.debug(f"Ensemble member background files:\n{pformat(bkg_staging_dict)}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -106,85 +169,47 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def letkf(self: Analysis) -> None: - """Execute a global atmens analysis + def execute(self, aprun_cmd: str, jedi_args: Optional[str] = None) -> None: + """Run JEDI executable - This method will execute a global atmens analysis using JEDI. 
- This includes: - - changing to the run directory - - running the global atmens analysis executable + This method will run JEDI executables for the global atmens analysis Parameters ---------- - Analysis: parent class for GDAS task - + aprun_cmd : str + Run command for JEDI application on HPC system + jedi_args : List + List of additional optional arguments for JEDI application Returns ---------- None """ - chdir(self.task_config.DATA) - - exec_cmd = Executable(self.task_config.APRUN_ATMENSANLLETKF) - exec_name = os.path.join(self.task_config.DATA, 'gdas.x') - - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('fv3jedi') - exec_cmd.add_default_arg('localensembleda') - exec_cmd.add_default_arg(self.task_config.jedi_yaml) - - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") - pass + if jedi_args: + logger.info(f"Executing {self.jedi.exe} {' '.join(jedi_args)} {self.jedi.yaml}") + else: + logger.info(f"Executing {self.jedi.exe} {self.jedi.yaml}") - @logit(logger) - def init_fv3_increment(self: Analysis) -> None: - # Setup JEDI YAML file - self.task_config.jedi_yaml = os.path.join(self.task_config.DATA, - f"{self.task_config.JCB_ALGO}.yaml") - save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) - - # Link JEDI executable to run directory - self.task_config.jedi_exe = self.link_jediexe() - - @logit(logger) - def fv3_increment(self: Analysis) -> None: - # Run executable - exec_cmd = Executable(self.task_config.APRUN_ATMENSANLFV3INC) - exec_cmd.add_default_arg(self.task_config.jedi_exe) - exec_cmd.add_default_arg(self.task_config.jedi_yaml) - - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution 
of {exec_cmd}") + self.jedi.execute(self.task_config, aprun_cmd, jedi_args) @logit(logger) - def finalize(self: Analysis) -> None: + def finalize(self) -> None: """Finalize a global atmens analysis This method will finalize a global atmens analysis using JEDI. This includes: - tar output diag files and place in ROTDIR - copy the generated YAML file from initialize to the ROTDIR - - write UFS model readable atm incrment file Parameters ---------- - Analysis: parent class for GDAS task + None Returns ---------- None """ + # ---- tar up diags # path of output tar statfile atmensstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.APREFIX}atmensstat") @@ -207,16 +232,20 @@ def finalize(self: Analysis) -> None: diaggzip = f"{diagfile}.gz" archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + # get list of yamls to copy to ROTDIR + yamls = glob.glob(os.path.join(self.task_config.DATA, '*atmens*yaml')) + # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") - src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") - logger.debug(f"Copying {src} to {dest}") - yaml_copy = { - 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS_ENS], - 'copy': [[src, dest]] - } - FileHandler(yaml_copy).sync() + for src in yamls: + logger.info(f"Copying {src} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") + yaml_base = os.path.splitext(os.path.basename(src))[0] + dest_yaml_name = f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.{yaml_base}.yaml" + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, dest_yaml_name) + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() # create template dictionaries template_inc = 
self.task_config.COM_ATMOS_ANALYSIS_TMPL @@ -231,6 +260,7 @@ def finalize(self: Analysis) -> None: logger.info("Copy UFS model readable atm increment file") cdate = to_fv3time(self.task_config.current_cycle) cdate_inc = cdate.replace('.', '_') + # loop over ensemble members for imem in range(1, self.task_config.NMEM_ENS + 1): memchar = f"mem{imem:03d}" diff --git a/ush/python/pygfs/task/bmatrix.py b/ush/python/pygfs/task/bmatrix.py new file mode 100644 index 0000000000..d0edba2358 --- /dev/null +++ b/ush/python/pygfs/task/bmatrix.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import List, Dict, Any, Union + +from wxflow import (parse_j2yaml, FileHandler, logit, + Task, Executable, WorkflowException) + +logger = getLogger(__name__.split('.')[-1]) + + +class BMatrix(Task): + """Parent class for GDAS BMatrix tasks + + The BMatrix class is the parent class for all + Global Data Assimilation System (GDAS) BMatrix tasks + """ + def __init__(self, config: Dict[str, Any]) -> None: + super().__init__(config) + # Store location of GDASApp jinja2 templates + self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas') + + def initialize(self) -> None: + super().initialize() + + def finalize(self) -> None: + super().finalize() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py new file mode 100644 index 0000000000..982f74130c --- /dev/null +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -0,0 +1,430 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import Dict, List, Any +import netCDF4 as nc +import numpy as np + +from wxflow import (AttrDict, + FileHandler, + to_fv3time, to_timedelta, add_to_datetime, + rm_p, chdir, + parse_j2yaml, save_as_yaml, + Jinja, + logit, + Executable, + WorkflowException) +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class SnowEnsAnalysis(Analysis): + """ 
+ Class for global ensemble snow analysis tasks + """ + + @logit(logger, name="SnowEnsAnalysis") + def __init__(self, config): + super().__init__(config) + + _res_det = int(self.task_config['CASE'][1:]) + _res_ens = int(self.task_config['CASE_ENS'][1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + _recenter_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.land_recenter.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res_ens + 1, + 'npy_ges': _res_ens + 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, + 'SNOW_WINDOW_BEGIN': _window_begin, + 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'jedi_yaml': _recenter_yaml, + } + ) + bkg_time = _window_begin if self.task_config.DOIAU else self.task_config.current_cycle + local_dict['bkg_time'] = bkg_time + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.task_config, **local_dict) + + @logit(logger) + def initialize(self) -> None: + """Initialize method for snow ensemble analysis + This method: + - stages background, increment, orography, and JEDI fix files + - writes the recentering YAML and copies the fregrid executable + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + super().initialize() + + # stage background and increment files + logger.info(f"Staging files from {self.task_config.SNOW_ENS_STAGE_TMPL}") + snow_stage_list = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) + FileHandler(snow_stage_list).sync() + + # stage orography files + logger.info(f"Staging orography files specified in {self.task_config.SNOW_OROG_STAGE_TMPL}") + snow_orog_stage_list 
= parse_j2yaml(self.task_config.SNOW_OROG_STAGE_TMPL, self.task_config) + FileHandler(snow_orog_stage_list).sync() + + # stage fix files for fv3-jedi + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_list).sync() + + # write land ensemble recentering YAML + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote recentering YAML to: {self.task_config.jedi_yaml}") + + # link recentering executable + # placeholder, currently already done by the analysis parent class + + # copy fregrid executable + fregrid_copy = {'copy': [[os.path.join(self.task_config.EXECgfs, 'fregrid'), os.path.join(self.task_config.DATA, 'fregrid.x')]]} + FileHandler(fregrid_copy).sync() + + @logit(logger) + def genWeights(self) -> None: + """Create a modified land_frac file for use by fregrid + to interpolate the snow background from det to ensres + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + # loop through tiles + for tile in range(1, self.task_config.ntiles + 1): + # open the restart and get the vegetation type + rst = nc.Dataset(f"./bkg/det/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc") + vtype = rst.variables['vtype'][:] + rst.close() + # open the oro data and get the land fraction + oro = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data.tile{tile}.nc") + land_frac = oro.variables['land_frac'][:] + oro.close() + # create an output file + ncfile = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight.tile{tile}.nc", mode='w', format='NETCDF4') + case_int = int(self.task_config.CASE[1:]) + lon = ncfile.createDimension('lon', case_int) + lat = ncfile.createDimension('lat', case_int) + lsm_frac_out = ncfile.createVariable('lsm_frac', np.float32, ('lon', 
'lat')) + # set the land fraction to 0 on glaciers to not interpolate that snow + glacier = 15 + land_frac[np.where(vtype[0, ...] == glacier)] = 0 + lsm_frac_out[:] = land_frac + # write out and close the file + ncfile.close() + + @logit(logger) + def genMask(self) -> None: + """Create a mask for use by JEDI + to mask out snow increments on non-LSM gridpoints + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + # loop through tiles + for tile in range(1, self.task_config.ntiles + 1): + # open the restart and get the vegetation type + rst = nc.Dataset(f"./bkg/mem001/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc", mode="r+") + vtype = rst.variables['vtype'][:] + slmsk = rst.variables['slmsk'][:] + # slmsk(Time, yaxis_1, xaxis_1) + # set the mask to 3 on glaciers + glacier = 15 + slmsk[np.where(vtype == glacier)] = 3 + # write out and close the file + rst.variables['slmsk'][:] = slmsk + rst.close() + + @logit(logger) + def regridDetBkg(self) -> None: + """Run fregrid to regrid the deterministic snow background + to the ensemble resolution + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + arg_list = [ + "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", + "--input_dir", f"./bkg/det/", + "--input_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--scalar_field", f"snodl", + "--output_dir", f"./bkg/det_ensres/", + "--output_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + "--interp_method", f"conserve_order1", + "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", + "--weight_field", f"lsm_frac", + "--remap_file", f"./remap", + ] + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + exec_cmd = Executable(fregrid_exe) + + try: + 
logger.debug(f"Executing {exec_cmd}") + exec_cmd(*arg_list) + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + @logit(logger) + def regridDetInc(self) -> None: + """Run fregrid to regrid the deterministic snow increment + to the ensemble resolution + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + arg_list = [ + "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", + "--input_dir", f"./inc/det/", + "--input_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--scalar_field", f"snodl", + "--output_dir", f"./inc/det_ensres/", + "--output_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + "--interp_method", f"conserve_order1", + "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", + "--weight_field", f"lsm_frac", + "--remap_file", f"./remap", + ] + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + exec_cmd = Executable(fregrid_exe) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd(*arg_list) + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + @logit(logger) + def recenterEns(self) -> None: + """Run recentering code to create an ensemble of snow increments + based on the deterministic increment, and the difference + between the determinstic and ensemble mean forecast + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + logger.info("Running recentering code") + exec_cmd = Executable(self.task_config.APRUN_ESNOWRECEN) + exec_name = os.path.join(self.task_config.DATA, 'gdasapp_land_ensrecenter.x') + 
exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + @logit(logger) + def finalize(self) -> None: + """Performs closing actions of the snow ensemble analysis task + This method: + - copies the ensemble snow analyses to the proper locations + - copies the ensemble mean increment to COM + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + # save files to COM + logger.info(f"Copying files described in {self.task_config.SNOW_ENS_FINALIZE_TMPL}") + snow_final_list = parse_j2yaml(self.task_config.SNOW_ENS_FINALIZE_TMPL, self.task_config) + FileHandler(snow_final_list).sync() + + @logit(logger) + def addEnsIncrements(self) -> None: + """Loop through all ensemble members and apply increment to create + a surface analysis for snow + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + bkg_times = [] + # no matter what, we want to process the center of the window + bkg_times.append(self.task_config.current_cycle) + # if DOIAU, we need to copy the increment to be valid at the center of the window + # and compute the analysis there to restart the model + if self.task_config.DOIAU: + logger.info("Copying increments to beginning of window") + template_in = f'snowinc.{to_fv3time(self.task_config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' + template_out = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, 7): + filename_in = template_in.format(tilenum=itile) + filename_out = template_out.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_in) + dest = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_out) 
+ inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() + # if running with IAU, we also need an analysis at the beginning of the window + bkg_times.append(self.task_config.SNOW_WINDOW_BEGIN) + + for bkg_time in bkg_times: + for mem in range(1, self.task_config.NMEM_ENS + 1): + # for now, just looping serially, should parallelize this eventually + logger.info(f"Now applying increment to member mem{mem:03}") + logger.info(f'{os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")}') + memdict = AttrDict( + { + 'HOMEgfs': self.task_config.HOMEgfs, + 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), + 'DATAROOT': self.task_config.DATA, + 'current_cycle': bkg_time, + 'CASE_ENS': self.task_config.CASE_ENS, + 'OCNRES': self.task_config.OCNRES, + 'ntiles': self.task_config.ntiles, + 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, + 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, + 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, + 'MYMEM': f"{mem:03}", + } + ) + self.add_increments(memdict) + + @staticmethod + @logit(logger) + def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) + that are needed for global snow DA and returns said dictionary for use by the FileHandler class. 
+ + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + COMIN_ATMOS_RESTART_PREV + DATA + current_cycle + ntiles + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + + bkg_dict = { + 'mkdir': [], + 'copy': [], + } + return bkg_dict + + @staticmethod + @logit(logger) + def add_increments(config: Dict) -> None: + """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds + + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + HOMEgfs + DATA + DATAROOT + current_cycle + CASE + OCNRES + ntiles + APPLY_INCR_NML_TMPL + APPLY_INCR_EXE + APRUN_APPLY_INCR + + Raises + ------ + OSError + Failure due to OS issues + WorkflowException + All other exceptions + """ + os.chdir(config.DATA) + + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = config.ENS_APPLY_INCR_NML_TMPL + nml_data = Jinja(nml_template, config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(config.DATA, "apply_incr_nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = config.APPLY_INCR_EXE + exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = Executable(config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exe}") + + def get_obs_dict(self) -> Dict[str, Any]: + obs_dict = { + 'mkdir': [], + 'copy': [], + } + 
return obs_dict + + def get_bias_dict(self) -> Dict[str, Any]: + bias_dict = { + 'mkdir': [], + 'copy': [], + } + return bias_dict diff --git a/ush/python/pygfs/task/stage_ic.py b/ush/python/pygfs/task/stage_ic.py new file mode 100644 index 0000000000..37cc4138f3 --- /dev/null +++ b/ush/python/pygfs/task/stage_ic.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 + +import glob +import os +from logging import getLogger +from typing import Any, Dict, List + +from wxflow import (AttrDict, FileHandler, Task, cast_strdict_as_dtypedict, + logit, parse_j2yaml, strftime, to_YMD, + add_to_datetime, to_timedelta, Template, TemplateConstants) + +logger = getLogger(__name__.split('.')[-1]) + + +class Stage(Task): + """Task to stage initial conditions + """ + + @logit(logger, name="Stage") + def __init__(self, config: Dict[str, Any]) -> None: + """Constructor for the Stage task + The constructor is responsible for collecting necessary settings based on + the runtime options and RUN. + + Parameters + ---------- + config : Dict[str, Any] + Incoming configuration for the task from the environment + + Returns + ------- + None + """ + super().__init__(config) + + @logit(logger) + def execute_stage(self, stage_dict: Dict[str, Any]) -> None: + """Perform local staging of initial condition files. 
+ + Parameters + ---------- + stage_dict : Dict[str, Any] + Configuration dictionary + + Returns + ------- + None + """ + + if not os.path.isdir(stage_dict.ROTDIR): + raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({stage_dict.ROTDIR}) does not exist!") + + # Add the os.path.exists function to the dict for yaml parsing + stage_dict['path_exists'] = os.path.exists + + # Add the glob.glob function for capturing filenames + stage_dict['glob'] = glob.glob + + # Parse stage yaml to get list of files to copy + stage_set = parse_j2yaml(self.task_config.STAGE_IC_YAML_TMPL, stage_dict, allow_missing=False) + + # Copy files to ROTDIR + for key in stage_set.keys(): + FileHandler(stage_set[key]).sync() diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py index 7e42e07c64..1b37b845f6 100644 --- a/ush/python/pygfs/task/upp.py +++ b/ush/python/pygfs/task/upp.py @@ -202,7 +202,7 @@ def index(cls, workdir: Union[str, os.PathLike], forecast_hour: int) -> None: template = f"GFS{{file_type}}.GrbF{forecast_hour:02d}" - for ftype in ['PRS', 'FLX']: + for ftype in ['PRS', 'FLX', 'GOES']: grbfile = template.format(file_type=ftype) grbfidx = f"{grbfile}.idx" diff --git a/versions/fix.ver b/versions/fix.ver index 3f85a45fee..7c18bea081 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -12,6 +12,7 @@ export gdas_fv3jedi_ver=20220805 export gdas_soca_ver=20240802 export gdas_gsibec_ver=20240416 export gdas_obs_ver=20240213 +export gdas_aero_ver=20240806 export glwu_ver=20220805 export gsi_ver=20240208 export lut_ver=20220805 diff --git a/versions/run.gaea.ver b/versions/run.gaea.ver index b92fe8c1db..c3aceb445d 100644 --- a/versions/run.gaea.ver +++ b/versions/run.gaea.ver @@ -2,5 +2,7 @@ export stack_intel_ver=2023.1.0 export stack_cray_mpich_ver=8.1.25 export spack_env=gsi-addon-dev +export perl_ver=5.38.2 + source "${HOMEgfs:-}/versions/run.spack.ver" export 
spack_mod_path="/ncrc/proj/epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.spack.ver b/versions/run.spack.ver index b19fe7dcdf..c3b5efa694 100644 --- a/versions/run.spack.ver +++ b/versions/run.spack.ver @@ -23,6 +23,7 @@ export py_jinja2_ver=3.1.2 export py_pandas_ver=1.5.3 export py_python_dateutil_ver=2.8.2 export py_f90nml_ver=1.4.3 +export py_numpy_ver=1.23.4 export met_ver=9.1.3 export metplus_ver=3.1.1 diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index 8c1f69735e..d6d7453c3c 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -172,7 +172,7 @@ def source_configs(self, run: str = "gfs", log: bool = True) -> Dict[str, Any]: files += ['config.fcst', 'config.efcs'] elif config in ['atmanlinit', 'atmanlvar', 'atmanlfv3inc']: files += ['config.atmanl', f'config.{config}'] - elif config in ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc']: + elif config in ['atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc']: files += ['config.atmensanl', f'config.{config}'] elif 'wave' in config: files += ['config.wave', f'config.{config}'] diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index e049a7d422..b8aa2dba3a 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -53,11 +53,11 @@ def _get_app_configs(self): if self.do_ocean or self.do_ice: configs += ['oceanice_products'] - configs += ['sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] + configs += ['stage_ic', 'sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] if self.do_hybvar: if self.do_jediatmens: - configs += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal'] + configs += ['atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal'] 
else: configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] @@ -107,12 +107,14 @@ def _get_app_configs(self): configs += ['waveawipsbulls', 'waveawipsgridded'] if self.do_aero: - configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + configs += ['aeroanlgenb', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal'] if self.do_prep_obs_aero: configs += ['prepobsaero'] if self.do_jedisnowda: configs += ['prepsnowobs', 'snowanl'] + if self.do_hybvar: + configs += ['esnowrecen'] if self.do_mos: configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', @@ -163,11 +165,14 @@ def get_task_names(self): hybrid_after_eupd_tasks = [] if self.do_hybvar: if self.do_jediatmens: - hybrid_tasks += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'echgres'] + hybrid_tasks += ['atmensanlinit', 'atmensanlfv3inc', 'atmensanlfinal', 'echgres'] + hybrid_tasks += ['atmensanlobs', 'atmensanlsol'] if self.lobsdiag_forenkf else ['atmensanlletkf'] else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] - hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] + if self.do_jedisnowda: + hybrid_tasks += ['esnowrecen'] + hybrid_after_eupd_tasks += ['stage_ic', 'ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] # Collect all "gdas" cycle tasks gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() @@ -179,11 +184,11 @@ def get_task_names(self): gdas_tasks += wave_prep_tasks if self.do_aero and 'gdas' in self.aero_anl_runs: - gdas_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + gdas_tasks += ['aeroanlgenb', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal'] if self.do_prep_obs_aero: gdas_tasks += ['prepobsaero'] - gdas_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] + gdas_tasks += ['stage_ic', 'atmanlupp', 'atmanlprod', 'fcst'] if self.do_upp: gdas_tasks += ['atmupp'] @@ -218,7 +223,7 @@ def get_task_names(self): 
gfs_tasks += wave_prep_tasks if self.do_aero and 'gfs' in self.aero_anl_runs: - gfs_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + gfs_tasks += ['aeroanlinit', 'aeroanlvar', 'aeroanlfinal'] if self.do_prep_obs_aero: gfs_tasks += ['prepobsaero'] @@ -297,6 +302,7 @@ def get_task_names(self): if self.do_hybvar and 'gfs' in self.eupd_runs: enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks enkfgfs_tasks.remove("echgres") + enkfgfs_tasks.remove("esnowrecen") tasks['enkfgfs'] = enkfgfs_tasks return tasks diff --git a/workflow/hosts.py b/workflow/hosts.py index eced460fd1..34ea067ade 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -16,7 +16,8 @@ class Host: """ SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', 'HERCULES', - 'WCOSS2', 'S4', 'CONTAINER', 'AWSPW', 'GAEA'] + 'WCOSS2', 'S4', 'CONTAINER', 'GAEA', + 'AWSPW', 'AZUREPW', 'GOOGLEPW'] def __init__(self, host=None): diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml index f925f54008..a9c708253e 100644 --- a/workflow/hosts/awspw.yaml +++ b/workflow/hosts/awspw.yaml @@ -18,7 +18,7 @@ CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported. HPSSARCH: 'NO' HPSS_PROJECT: emc-global #TODO: See `ATARDIR` below. -BASE_CPLIC: '/bucket/global-workflow-shared-data/ICSDIR/prototype_ICs' +BASE_IC: '/bucket/global-workflow-shared-data/ICSDIR' LOCALARCH: 'NO' ATARDIR: '' # TODO: This will not yet work from AWS. MAKE_NSSTBUFR: 'NO' diff --git a/workflow/hosts/azurepw.yaml b/workflow/hosts/azurepw.yaml new file mode 100644 index 0000000000..2155c67dea --- /dev/null +++ b/workflow/hosts/azurepw.yaml @@ -0,0 +1,26 @@ +BASE_GIT: '' #TODO: This does not yet exist. +DMPDIR: '' # TODO: This does not yet exist. +PACKAGEROOT: '' #TODO: This does not yet exist. +COMINsyn: '' #TODO: This does not yet exist. 
+HOMEDIR: '/contrib/${USER}' +STMP: '/lustre/${USER}/stmp/' +PTMP: '/lustre/${USER}/ptmp/' +NOSCRUB: '${HOMEDIR}' +ACCOUNT: '${USER}' +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: compute +PARTITION_SERVICE: compute +RESERVATION: '' +CLUSTERS: '' +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported. +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global #TODO: See `ATARDIR` below. +BASE_CPLIC: '/bucket/global-workflow-shared-data/ICSDIR/prototype_ICs' +LOCALARCH: 'NO' +ATARDIR: '' # TODO: This will not yet work from AZURE. +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions. diff --git a/workflow/hosts/gaea.yaml b/workflow/hosts/gaea.yaml index 619a86f2e5..9297fed24a 100644 --- a/workflow/hosts/gaea.yaml +++ b/workflow/hosts/gaea.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/git' DMPDIR: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/dump' -BASE_CPLIC: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/ICSDIR/prototype_ICs' +BASE_IC: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/ICSDIR' PACKAGEROOT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/nwpara' COMROOT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/com' COMINsyn: '${COMROOT}/gfs/prod/syndat' diff --git a/workflow/hosts/googlepw.yaml b/workflow/hosts/googlepw.yaml new file mode 100644 index 0000000000..2bd9439d5f --- /dev/null +++ b/workflow/hosts/googlepw.yaml @@ -0,0 +1,26 @@ +BASE_GIT: '' #TODO: This does not yet exist. +DMPDIR: '' # TODO: This does not yet exist. +PACKAGEROOT: '' #TODO: This does not yet exist. +COMINsyn: '' #TODO: This does not yet exist. 
+HOMEDIR: '/contrib/${USER}' +STMP: '/lustre/${USER}/stmp/' +PTMP: '/lustre/${USER}/ptmp/' +NOSCRUB: '${HOMEDIR}' +ACCOUNT: '${USER}' +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: compute +PARTITION_SERVICE: compute +RESERVATION: '' +CLUSTERS: '' +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported. +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global #TODO: See `ATARDIR` below. +BASE_IC: '/bucket/global-workflow-shared-data/ICSDIR' +LOCALARCH: 'NO' +ATARDIR: '' # TODO: This will not yet work from GOOGLE. +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions. diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml index 731e583961..4ace199470 100644 --- a/workflow/hosts/hera.yaml +++ b/workflow/hosts/hera.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git' DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' -BASE_CPLIC: '/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs' +BASE_IC: '/scratch1/NCEPDEV/global/glopara/data/ICSDIR' PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' COMINsyn: '/scratch1/NCEPDEV/global/glopara/com/gfs/prod/syndat' HOMEDIR: '/scratch1/NCEPDEV/global/${USER}' diff --git a/workflow/hosts/hercules.yaml b/workflow/hosts/hercules.yaml index b513bfd57a..9d6339a48e 100644 --- a/workflow/hosts/hercules.yaml +++ b/workflow/hosts/hercules.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/work/noaa/global/glopara/git_rocky9' DMPDIR: '/work/noaa/rstprod/dump' -BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs' +BASE_IC: '/work/noaa/global/glopara/data/ICSDIR' PACKAGEROOT: '/work/noaa/global/glopara/nwpara' COMINsyn: '/work/noaa/global/glopara/com/gfs/prod/syndat' HOMEDIR: '/work/noaa/global/${USER}' diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml index ae7267d687..21e815c9b2 100644 --- a/workflow/hosts/jet.yaml +++ b/workflow/hosts/jet.yaml @@ -1,6 +1,6 @@ BASE_GIT: 
'/lfs4/HFIP/hfv3gfs/glopara/git' DMPDIR: '/lfs4/HFIP/hfv3gfs/glopara/dump' -BASE_CPLIC: '/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs' +BASE_IC: '/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR' PACKAGEROOT: '/lfs4/HFIP/hfv3gfs/glopara/nwpara' COMINsyn: '/lfs4/HFIP/hfv3gfs/glopara/com/gfs/prod/syndat' HOMEDIR: '/lfs4/HFIP/hfv3gfs/${USER}' diff --git a/workflow/hosts/orion.yaml b/workflow/hosts/orion.yaml index f0f807aacf..81daea6168 100644 --- a/workflow/hosts/orion.yaml +++ b/workflow/hosts/orion.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/work/noaa/global/glopara/git' DMPDIR: '/work/noaa/rstprod/dump' -BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs' +BASE_IC: '/work/noaa/global/glopara/data/ICSDIR' PACKAGEROOT: '/work/noaa/global/glopara/nwpara' COMINsyn: '/work/noaa/global/glopara/com/gfs/prod/syndat' HOMEDIR: '/work/noaa/global/${USER}' diff --git a/workflow/hosts/s4.yaml b/workflow/hosts/s4.yaml index aea807da63..c2af9728f2 100644 --- a/workflow/hosts/s4.yaml +++ b/workflow/hosts/s4.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/data/prod/glopara/git' DMPDIR: '/data/prod/glopara/dump' -BASE_CPLIC: '/data/prod/glopara/coupled_ICs' +BASE_IC: '/data/prod/glopara/coupled_ICs' PACKAGEROOT: '/data/prod/glopara/nwpara' COMINsyn: '/data/prod/glopara/com/gfs/prod/syndat' HOMEDIR: '/data/users/${USER}' diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml index 7ae2be1424..bf2cc41c45 100644 --- a/workflow/hosts/wcoss2.yaml +++ b/workflow/hosts/wcoss2.yaml @@ -1,6 +1,6 @@ BASE_GIT: '/lfs/h2/emc/global/save/emc.global/git' DMPDIR: '/lfs/h2/emc/dump/noscrub/dump' -BASE_CPLIC: '/lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/prototype_ICs' +BASE_IC: '/lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR' PACKAGEROOT: '${PACKAGEROOT:-"/lfs/h1/ops/prod/packages"}' COMINsyn: '/lfs/h1/ops/prod/com/gfs/v16.3/syndat' HOMEDIR: '/lfs/h2/emc/global/noscrub/${USER}' diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py index f0f73d1173..70a39cea5a 
100644 --- a/workflow/rocoto/gefs_tasks.py +++ b/workflow/rocoto/gefs_tasks.py @@ -10,70 +10,11 @@ def __init__(self, app_config: AppConfig, run: str) -> None: super().__init__(app_config, run) def stage_ic(self): - cpl_ic = self._configs['stage_ic'] - deps = [] - dtg_prefix = "@Y@m@d.@H0000" - offset = str(self._configs['base']['OFFSET_START_HOUR']).zfill(2) + ":00:00" - # Atm ICs - if self.app_config.do_atm: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos/" - if self._base['EXP_WARM_START']: - for file in ['fv_core.res.nc'] + \ - [f'{datatype}.tile{tile}.nc' - for datatype in ['ca_data', 'fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] - for tile in range(1, self.n_tiles + 1)]: - data = [prefix, f"{dtg_prefix}.{file}"] - dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} - deps.append(rocoto.add_dependency(dep_dict)) - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/med/" - data = [prefix, f"{dtg_prefix}.ufs.cpld.cpl.r.nc"] - dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} - deps.append(rocoto.add_dependency(dep_dict)) - else: - for file in ['gfs_ctrl.nc'] + \ - [f'{datatype}_data.tile{tile}.nc' - for datatype in ['gfs', 'sfc'] - for tile in range(1, self.n_tiles + 1)]: - data = f"{prefix}/{file}" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - # Ocean ICs - if self.app_config.do_ocean: - ocn_res = f"{self._base.get('OCNRES', '025'):03d}" - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean/" - data = [prefix, f"{dtg_prefix}.MOM.res.nc"] - dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} - deps.append(rocoto.add_dependency(dep_dict)) - if ocn_res in ['025']: - # 0.25 degree ocean model also has these additional restarts - for res in [f'res_{res_index}' for res_index in range(1, 4)]: - data = [prefix, f"{dtg_prefix}.MOM.{res}.nc"] - dep_dict = {'type': 
'data', 'data': data, 'offset': [None, offset]} - deps.append(rocoto.add_dependency(dep_dict)) - - # Ice ICs - if self.app_config.do_ice: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice/" - data = [prefix, f"{dtg_prefix}.cice_model.res.nc"] - dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} - deps.append(rocoto.add_dependency(dep_dict)) - - # Wave ICs - if self.app_config.do_wave: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave/" - for wave_grid in self._configs['waveinit']['waveGRD'].split(): - data = [prefix, f"{dtg_prefix}.restart.{wave_grid}"] - dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} - deps.append(rocoto.add_dependency(dep_dict)) - - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('stage_ic') task_name = f'stage_ic' task_dict = {'task_name': task_name, 'resources': resources, - 'dependency': dependencies, 'envars': self.envars, 'cycledef': 'gefs', 'command': f'{self.HOMEgfs}/jobs/rocoto/stage_ic.sh', @@ -383,12 +324,9 @@ def atmos_ensstat(self): def wavepostsbs(self): deps = [] - for wave_grid in self._configs['wavepostsbs']['waveGRD'].split(): - wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'}) - data = f'{wave_hist_path}/gefswave.out_grd.{wave_grid}.@Y@m@d.@H0000' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dep_dict = {'type': 'metatask', 'name': f'fcst_mem#member#'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) wave_post_envars = self.envars.copy() postenvar_dict = {'ENSMEM': '#member#', @@ -617,7 +555,7 @@ def arch(self): 'envars': self.envars, 'cycledef': 'gefs', 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/jobs/rocoto/arch_test.sh', + 'command': 
f'{self.HOMEgfs}/jobs/rocoto/arch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 9d9b28fb17..89da933d00 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -18,65 +18,14 @@ def _is_this_a_gdas_task(run, task_name): # Specific Tasks begin here def stage_ic(self): - cpl_ic = self._configs['stage_ic'] - - deps = [] - - # Atm ICs - if self.app_config.do_atm: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/atmos" - for file in ['gfs_ctrl.nc'] + \ - [f'{datatype}_data.tile{tile}.nc' - for datatype in ['gfs', 'sfc'] - for tile in range(1, self.n_tiles + 1)]: - data = f"{prefix}/{file}" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - else: # data-atmosphere - # TODO - need more information about how these forcings are stored - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_DATM']}/@Y@m@d@H" - data = f"{prefix}/gefs.@Y@m.nc" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - # Ocean ICs - if self.app_config.do_ocean: - ocn_res = f"{self._base.get('OCNRES', '025'):03d}" - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocean" - data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - if ocn_res in ['025']: - # 0.25 degree ocean model also has these additional restarts - for res in [f'res_{res_index}' for res_index in range(1, 4)]: - data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - # Ice ICs - if self.app_config.do_ice: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/ice" - data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc" - dep_dict = {'type': 'data', 'data': data} - 
deps.append(rocoto.add_dependency(dep_dict)) - - # Wave ICs - if self.app_config.do_wave: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wave" - for wave_grid in self._configs['waveinit']['waveGRD'].split(): - data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + cycledef = 'gdas_half' if self.run in ['gdas', 'enkfgdas'] else self.run resources = self.get_resource('stage_ic') task_name = f'{self.run}stage_ic' task_dict = {'task_name': task_name, 'resources': resources, - 'dependency': dependencies, 'envars': self.envars, - 'cycledef': self.run, + 'cycledef': cycledef, 'command': f'{self.HOMEgfs}/jobs/rocoto/stage_ic.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', @@ -506,13 +455,41 @@ def prepobsaero(self): return task + def aeroanlgenb(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('aeroanlgenb') + task_name = f'{self.run}aeroanlgenb' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': 'gdas_half,gdas', + 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlgenb.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + def aeroanlinit(self): deps = [] + dep_dict = {'type': 'task', 'name': 'gdasaeroanlgenb', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + if 
self.app_config.do_prep_obs_aero: dep_dict = {'type': 'task', 'name': f'{self.run}prepobsaero'} - deps.append(rocoto.add_dependency(dep_dict)) + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('aeroanlinit') @@ -532,21 +509,28 @@ def aeroanlinit(self): return task - def aeroanlrun(self): + def aeroanlvar(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlinit'} + dep_dict = { + 'type': 'task', 'name': f'gdasaeroanlgenb', + 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}", + } deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) + dep_dict = { + 'type': 'task', 'name': f'{self.run}aeroanlinit', + } + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('aeroanlrun') - task_name = f'{self.run}aeroanlrun' + resources = self.get_resource('aeroanlvar') + task_name = f'{self.run}aeroanlvar' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlrun.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlvar.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -559,7 +543,7 @@ def aeroanlrun(self): def aeroanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlrun'} + dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlvar'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) @@ -627,6 +611,33 @@ def snowanl(self): task = rocoto.create_task(task_dict) return task + def esnowrecen(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prepsnowobs'} + 
deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}snowanl'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': f'{self.run}epmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('esnowrecen') + task_name = f'{self.run}esnowrecen' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.run.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/esnowrecen.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + return task + def prepoceanobs(self): ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) @@ -935,7 +946,7 @@ def _fcst_cycled(self): dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) if self.run in ['gdas']: - dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + dep_dict = {'type': 'task', 'name': f'{self.run}stage_ic'} dependencies.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) @@ -1177,12 +1188,9 @@ def _atmosoceaniceprod(self, component: str): def wavepostsbs(self): deps = [] - for wave_grid in self._configs['wavepostsbs']['waveGRD'].split(): - wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) - data = f'{wave_hist_path}/{self.run}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', 
dep=deps) + dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) resources = self.get_resource('wavepostsbs') task_name = f'{self.run}wavepostsbs' @@ -2478,6 +2486,58 @@ def atmensanlinit(self): return task + def atmensanlobs(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmensanlobs') + task_name = f'{self.run}atmensanlobs' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.run.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlobs.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + + def atmensanlsol(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmensanlsol') + task_name = f'{self.run}atmensanlsol' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.run.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlsol.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': 
f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + def atmensanlletkf(self): deps = [] @@ -2507,7 +2567,10 @@ def atmensanlletkf(self): def atmensanlfv3inc(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlletkf'} + if self.app_config.lobsdiag_forenkf: + dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlsol'} + else: + dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlletkf'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} deps.append(rocoto.add_dependency(dep_dict)) @@ -2633,6 +2696,9 @@ def esfc(self): else: dep_dict = {'type': 'task', 'name': f'{self.run}eupd'} deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jedisnowda: + dep_dict = {'type': 'task', 'name': f'{self.run}esnowrecen'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('esfc') @@ -2660,7 +2726,7 @@ def efcs(self): dep_dict = {'type': 'task', 'name': f'{self.run}esfc'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + dep_dict = {'type': 'task', 'name': f'{self.run}stage_ic'} dependencies.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 64952498d4..d8d5edb5e6 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -19,9 +19,9 @@ class Tasks: 'ocnanalprep', 'marinebmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 
'epos', 'esfc', 'eupd', - 'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', - 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', - 'prepsnowobs', 'snowanl', + 'atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', + 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal', 'aeroanlgenb', + 'prepsnowobs', 'snowanl', 'esnowrecen', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', 'atmos_prod', 'ocean_prod', 'ice_prod', diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py index ca54f3a5bb..d9ca4fb961 100644 --- a/workflow/rocoto/workflow_xml.py +++ b/workflow/rocoto/workflow_xml.py @@ -162,8 +162,7 @@ def _write_crontab(self, crontab_file: str = None, cronint: int = 5) -> None: # AWS need 'SHELL', and 'BASH_ENV' defined, or, the crontab job won't start. if os.environ.get('PW_CSP', None) in ['aws', 'azure', 'google']: strings.extend([f'SHELL="/bin/bash"', - f'BASH_ENV="/etc/bashrc"' - ]) + f'BASH_ENV="/etc/bashrc"']) strings.extend([f'{cronintstr} {rocotorunstr}', '#################################################################', '']) diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index 3e70df0f02..e213394e20 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -29,235 +29,6 @@ def makedirs_if_missing(dirname): os.makedirs(dirname) -def fill_ROTDIR(host, inputs): - """ - Method to populate the ROTDIR for supported modes. 
- INPUTS: - host: host object from class Host - inputs: user inputs to setup_expt.py - """ - - fill_modes = { - 'cycled': fill_ROTDIR_cycled, - 'forecast-only': fill_ROTDIR_forecasts - } - - try: - fill_modes[inputs.mode](host, inputs) - except KeyError: - raise NotImplementedError(f'{inputs.mode} is not a supported mode.\n' + - 'Currently supported modes are:\n' + - f'{" | ".join(fill_modes.keys())}') - - return - - -def fill_ROTDIR_cycled(host, inputs): - """ - Implementation of 'fill_ROTDIR' for cycled mode - """ - - rotdir = os.path.join(inputs.comroot, inputs.pslot) - - do_ocean = do_ice = do_med = False - - if 'S2S' in inputs.app: - do_ocean = do_ice = do_med = True - - if inputs.icsdir is None: - warnings.warn("User did not provide '--icsdir' to stage initial conditions") - return - - rdatestr = datetime_to_YMDH(inputs.idate - to_timedelta('T06H')) - idatestr = datetime_to_YMDH(inputs.idate) - - # Test if we are using the new COM structure or the old flat one for ICs - if inputs.start in ['warm']: - pathstr = os.path.join(inputs.icsdir, f'{inputs.run}.{rdatestr[:8]}', - rdatestr[8:], 'model_data', 'atmos') - else: - pathstr = os.path.join(inputs.icsdir, f'{inputs.run}.{idatestr[:8]}', - idatestr[8:], 'model_data', 'atmos') - - if os.path.isdir(pathstr): - flat_structure = False - else: - flat_structure = True - - # Destination always uses the new COM structure - # These should match the templates defined in config.com - if inputs.start in ['warm']: - dst_atm_dir = os.path.join('model_data', 'atmos', 'restart') - dst_med_dir = os.path.join('model_data', 'med', 'restart') - else: - dst_atm_dir = os.path.join('model_data', 'atmos', 'input') - dst_med_dir = '' # no mediator files for a "cold start" - do_med = False - dst_ocn_rst_dir = os.path.join('model_data', 'ocean', 'restart') - dst_ocn_anl_dir = os.path.join('analysis', 'ocean') - dst_ice_rst_dir = os.path.join('model_data', 'ice', 'restart') - dst_ice_anl_dir = os.path.join('analysis', 'ice') - 
dst_atm_anl_dir = os.path.join('analysis', 'atmos') - - if flat_structure: - # ICs are in the old flat COM structure - if inputs.start in ['warm']: # This is warm start experiment - src_atm_dir = os.path.join('atmos', 'RESTART') - src_med_dir = os.path.join('med', 'RESTART') - elif inputs.start in ['cold']: # This is a cold start experiment - src_atm_dir = os.path.join('atmos', 'INPUT') - src_med_dir = '' # no mediator files for a "cold start" - do_med = False - # ocean and ice have the same filenames for warm and cold - src_ocn_rst_dir = os.path.join('ocean', 'RESTART') - src_ocn_anl_dir = 'ocean' - src_ice_rst_dir = os.path.join('ice', 'RESTART') - src_ice_anl_dir = dst_ice_anl_dir - src_atm_anl_dir = 'atmos' - else: - src_atm_dir = dst_atm_dir - src_med_dir = dst_med_dir - src_ocn_rst_dir = dst_ocn_rst_dir - src_ocn_anl_dir = dst_ocn_anl_dir - src_ice_rst_dir = dst_ice_rst_dir - src_ice_anl_dir = dst_ice_anl_dir - src_atm_anl_dir = dst_atm_anl_dir - - def link_files_from_src_to_dst(src_dir, dst_dir): - files = os.listdir(src_dir) - for fname in files: - os.symlink(os.path.join(src_dir, fname), - os.path.join(dst_dir, fname)) - return - - # Link ensemble member initial conditions - if inputs.nens > 0: - previous_cycle_dir = f'enkf{inputs.run}.{rdatestr[:8]}/{rdatestr[8:]}' - current_cycle_dir = f'enkf{inputs.run}.{idatestr[:8]}/{idatestr[8:]}' - - for ii in range(1, inputs.nens + 1): - memdir = f'mem{ii:03d}' - # Link atmospheric files - if inputs.start in ['warm']: - dst_dir = os.path.join(rotdir, previous_cycle_dir, memdir, dst_atm_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_atm_dir) - elif inputs.start in ['cold']: - dst_dir = os.path.join(rotdir, current_cycle_dir, memdir, dst_atm_dir) - src_dir = os.path.join(inputs.icsdir, current_cycle_dir, memdir, src_atm_dir) - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # Link ocean files - if do_ocean: - dst_dir = os.path.join(rotdir, 
previous_cycle_dir, memdir, dst_ocn_rst_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_ocn_rst_dir) - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # First 1/2 cycle needs a MOM6 increment - incfile = f'enkf{inputs.run}.t{idatestr[8:]}z.ocninc.nc' - src_file = os.path.join(inputs.icsdir, current_cycle_dir, memdir, src_ocn_anl_dir, incfile) - dst_file = os.path.join(rotdir, current_cycle_dir, memdir, dst_ocn_anl_dir, incfile) - makedirs_if_missing(os.path.join(rotdir, current_cycle_dir, memdir, dst_ocn_anl_dir)) - os.symlink(src_file, dst_file) - - # Link ice files - if do_ice: - dst_dir = os.path.join(rotdir, previous_cycle_dir, memdir, dst_ice_rst_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_ice_rst_dir) - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # Link mediator files - if do_med: - dst_dir = os.path.join(rotdir, previous_cycle_dir, memdir, dst_med_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_med_dir) - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # Link deterministic initial conditions - previous_cycle_dir = f'{inputs.run}.{rdatestr[:8]}/{rdatestr[8:]}' - current_cycle_dir = f'{inputs.run}.{idatestr[:8]}/{idatestr[8:]}' - - # Link atmospheric files - if inputs.start in ['warm']: - dst_dir = os.path.join(rotdir, previous_cycle_dir, dst_atm_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_atm_dir) - elif inputs.start in ['cold']: - dst_dir = os.path.join(rotdir, current_cycle_dir, dst_atm_dir) - src_dir = os.path.join(inputs.icsdir, current_cycle_dir, src_atm_dir) - - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # Link ocean files - if do_ocean: - dst_dir = os.path.join(rotdir, previous_cycle_dir, dst_ocn_rst_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_ocn_rst_dir) - 
makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # First 1/2 cycle needs a MOM6 increment - incfile = f'{inputs.run}.t{idatestr[8:]}z.ocninc.nc' - src_file = os.path.join(inputs.icsdir, current_cycle_dir, src_ocn_anl_dir, incfile) - dst_file = os.path.join(rotdir, current_cycle_dir, dst_ocn_anl_dir, incfile) - makedirs_if_missing(os.path.join(rotdir, current_cycle_dir, dst_ocn_anl_dir)) - os.symlink(src_file, dst_file) - - # Link ice files - if do_ice: - # First 1/2 cycle needs a CICE6 analysis restart - src_dir = os.path.join(inputs.icsdir, current_cycle_dir, src_ice_anl_dir) - dst_dir = os.path.join(rotdir, current_cycle_dir, src_ice_anl_dir) - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # Link mediator files - if do_med: - dst_dir = os.path.join(rotdir, previous_cycle_dir, dst_med_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_med_dir) - makedirs_if_missing(dst_dir) - link_files_from_src_to_dst(src_dir, dst_dir) - - # Link bias correction and radiance diagnostics files - src_dir = os.path.join(inputs.icsdir, current_cycle_dir, src_atm_anl_dir) - dst_dir = os.path.join(rotdir, current_cycle_dir, dst_atm_anl_dir) - makedirs_if_missing(dst_dir) - for ftype in ['abias', 'abias_pc', 'abias_air', 'radstat']: - fname = f'{inputs.run}.t{idatestr[8:]}z.{ftype}' - src_file = os.path.join(src_dir, fname) - if os.path.exists(src_file): - os.symlink(src_file, os.path.join(dst_dir, fname)) - # First 1/2 cycle also needs a atmos increment if doing warm start - if inputs.start in ['warm']: - for ftype in ['atmi003.nc', 'atminc.nc', 'atmi009.nc']: - fname = f'{inputs.run}.t{idatestr[8:]}z.{ftype}' - src_file = os.path.join(src_dir, fname) - if os.path.exists(src_file): - os.symlink(src_file, os.path.join(dst_dir, fname)) - if inputs.nens > 0: - current_cycle_dir = f'enkf{inputs.run}.{idatestr[:8]}/{idatestr[8:]}' - for ii in range(1, inputs.nens + 1): - memdir = f'mem{ii:03d}' - src_dir = 
os.path.join(inputs.icsdir, current_cycle_dir, memdir, src_atm_anl_dir) - dst_dir = os.path.join(rotdir, current_cycle_dir, memdir, dst_atm_anl_dir) - makedirs_if_missing(dst_dir) - for ftype in ['ratmi003.nc', 'ratminc.nc', 'ratmi009.nc']: - fname = f'enkf{inputs.run}.t{idatestr[8:]}z.{ftype}' - src_file = os.path.join(src_dir, fname) - if os.path.exists(src_file): - os.symlink(src_file, os.path.join(dst_dir, fname)) - - return - - -def fill_ROTDIR_forecasts(host, inputs): - """ - Implementation of 'fill_ROTDIR' for forecast-only mode - """ - print('forecast-only mode treats ICs differently and cannot be staged here') - - def fill_EXPDIR(inputs): """ Method to copy config files from workflow to experiment directory @@ -294,7 +65,17 @@ def _update_defaults(dict_in: dict) -> dict: # First update config.base edit_baseconfig(host, inputs, yaml_dict) - # loop over other configs and update them + # Update stage config + stage_dict = { + "@ICSDIR@": inputs.icsdir + } + host_dict = get_template_dict(host.info) + stage_dict = dict(stage_dict, **host_dict) + stage_input = f'{inputs.configdir}/config.stage_ic' + stage_output = f'{inputs.expdir}/{inputs.pslot}/config.stage_ic' + edit_config(stage_input, stage_output, stage_dict) + + # Loop over other configs and update them with defaults for cfg in yaml_dict.keys(): if cfg == 'base': continue @@ -419,6 +200,7 @@ def _common_args(parser): parser.add_argument('--idate', help='starting date of experiment, initial conditions must exist!', required=True, type=lambda dd: to_datetime(dd)) parser.add_argument('--edate', help='end date experiment', required=True, type=lambda dd: to_datetime(dd)) + parser.add_argument('--icsdir', help='full path to user initial condition directory', type=str, required=False, default='') parser.add_argument('--overwrite', help='overwrite previously created experiment (if it exists)', action='store_true', required=False) return parser @@ -435,7 +217,6 @@ def _gfs_args(parser): return parser def 
_gfs_cycled_args(parser): - parser.add_argument('--icsdir', help='full path to initial condition directory', type=str, required=False, default=None) parser.add_argument('--app', help='UFS application', type=str, choices=ufs_apps, required=False, default='ATM') parser.add_argument('--gfs_cyc', help='cycles to run forecast', type=int, @@ -463,8 +244,6 @@ def _gefs_args(parser): default=os.path.join(_top, 'parm/config/gefs')) parser.add_argument('--yaml', help='Defaults to substitute from', type=str, required=False, default=os.path.join(_top, 'parm/config/gefs/yaml/defaults.yaml')) - parser.add_argument('--icsdir', help='full path to initial condition directory [temporary hack in place for testing]', - type=str, required=False, default=None) return parser description = """ @@ -582,7 +361,6 @@ def main(*argv): if create_rotdir: makedirs_if_missing(rotdir) - fill_ROTDIR(host, user_inputs) if create_expdir: makedirs_if_missing(expdir)