diff --git a/parm/FV3.input.yml b/parm/FV3.input.yml deleted file mode 100644 index efb6c85f5b..0000000000 --- a/parm/FV3.input.yml +++ /dev/null @@ -1,546 +0,0 @@ -# This configuration file maintains the modifications that need to be -# made to the base FV3 namelist specified in -# -# parm/input.nml.FV3 -# -# to obtain the namelist for each physics suite that the SRW App can -# run with. - - -FV3_RRFS_v1beta: - gfs_physics_nml: &RRFS_v1beta_phys - do_deep: False - do_mynnsfclay: True - imfdeepcnv: -1 - imfshalcnv: -1 - iopt_alb: 2 - iopt_btr: 1 - iopt_crs: 1 - iopt_dveg: 2 - iopt_frz: 1 - iopt_inf: 1 - iopt_rad: 1 - iopt_run: 1 - iopt_sfc: 1 - iopt_snf: 4 - iopt_stc: 1 - iopt_tbot: 2 - iopt_trs: 2 - lsm: 2 - lsoil_lsm: 4 -FV3_WoFS_v0: - gfs_physics_nml: - do_deep: False - imfdeepcnv: 0 - imfshalcnv: 0 - iopt_alb: 2 - iopt_btr: 1 - iopt_crs: 1 - iopt_dveg: 2 - iopt_frz: 1 - iopt_inf: 1 - iopt_rad: 1 - iopt_run: 1 - iopt_sfc: 1 - iopt_snf: 4 - iopt_stc: 1 - iopt_tbot: 2 - do_mynnsfclay: True - imfdeepcnv: -1 - imfshalcnv: -1 - lsm: 1 - lsoil_lsm: 4 - imp_physics: 17 - nssl_cccn: 0.6e+9 - nssl_hail_on: True - nssl_ccn_on: True - fv_core_nml: - nwat: 7 - fv_diagnostics_nml: - do_hailcast: True - -FV3_HRRR: - fv_core_nml: &HRRR_fv_core - hord_dp: 6 - hord_mt: 6 - hord_tm: 6 - hord_vt: 6 - hord_tr: 8 - kord_mt: 9 - kord_tm: -9 - kord_tr: 9 - kord_wz: 9 - nord_tr: 0 - nrows_blend: 20 - d_con: 0.5 - n_sponge: 9 - gfs_physics_nml: - <<: *RRFS_v1beta_phys - cdmbgwd: [3.5, 1.0] - do_mynnsfclay: True - do_sfcperts: null - gwd_opt: 3 - do_gsl_drag_ss: True - do_gsl_drag_tofd: True - do_gsl_drag_ls_bl: True - iaer: 5111 - icliq_sw: 2 - iovr: 3 - lsm: 3 - lsoil_lsm: 9 - sfclay_compute_flux: True - diag_log: True - ialb: 2 - iems: 2 - isncond_opt: 2 - isncovr_opt: 3 - mosaic_lu: 0 - mosaic_soil: 0 - thsfc_loc: False - nst_anl: null - nstf_name: null - -FV3_RAP: - fv_core_nml: - <<: *HRRR_fv_core - gfs_physics_nml: - <<: *RRFS_v1beta_phys - cdmbgwd: [3.5, 1.0] - do_mynnsfclay: True - do_sfcperts: null - gwd_opt: 3 - do_gsl_drag_ss: True - do_gsl_drag_tofd: True - do_gsl_drag_ls_bl: True - iaer: 5111 - icliq_sw: 2 - iovr: 3 - lsm: 3 - lsoil_lsm: 9 - sfclay_compute_flux: False - do_deep: True - shal_cnv: True - imfdeepcnv: 3 - imfshalcnv: 3 - -FV3_GFS_2017_gfdlmp: - atmos_model_nml: - avg_max_length: 3600.0 - fv_core_nml: &gfs_2017_gfdlmp_fv_core - agrid_vel_rst: False - d4_bg: 0.15 - delt_max: 0.008 - do_sat_adj: True - fv_debug: False - k_split: 6 - n_split: 6 - nord: 2 - nord_zs_filter: null - range_warn: False - vtdm4: 0.075 - gfs_physics_nml: &gfs_2017_gfdlmp_phys - avg_max_length: 3600.0 - bl_mynn_tkeadvect: null - bl_mynn_edmf: null - bl_mynn_edmf_mom: null - cdmbgwd: [3.5, 0.01] - cplflx: null - do_deep: False - do_mynnedmf: null - do_mynnsfclay: null - fhcyc: 0.0 - fhlwr: 3600.0 - fhswr: 3600.0 - hybedmf: True - icloud_bl: null - imfdeepcnv: 2 - imfshalcnv: 2 - imp_physics: 11 - lgfdlmprad: True - lheatstrg: null - lndp_type: null - lsm: null - lsoil: null - lsoil_lsm: null - ltaerosol: null - n_var_lndp: null - oz_phys: True - oz_phys_2015: False - satmedmf: null - shal_cnv: True - ttendlim: null - gfdl_cloud_microphysics_nml: &gfs_gfdl_cloud_mp - c_cracw: 0.8 - c_paut: 0.5 - c_pgacs: 0.01 - c_psaci: 0.05 - ccn_l: 300.0 - ccn_o: 100.0 - const_vg: False - const_vi: False - const_vr: False - const_vs: False - de_ice: False - do_qa: True - do_sedi_heat: False - dw_land: 0.16 - dw_ocean: 0.1 - fast_sat_adj: True - fix_negative: True - icloud_f: 1 - mono_prof: True - mp_time: 90.0 - prog_ccn: False - 
qi0_crt: 8.0e-05 - qi_lim: 1.0 - ql_gen: 0.001 - ql_mlt: 0.001 - qs0_crt: 0.001 - rad_graupel: True - rad_rain: True - rad_snow: True - rh_inc: 0.3 - rh_inr: 0.3 - rh_ins: 0.3 - rthresh: 1.0e-05 - sedi_transport: False - tau_g2v: 900.0 - tau_i2s: 1000.0 - tau_l2v: 180.0 - tau_v2l: 90.0 - use_ccn: True - use_ppm: False - vg_max: 12.0 - vi_max: 1.0 - vr_max: 12.0 - vs_max: 2.0 - z_slope_ice: True - z_slope_liq: True - -FV3_GFS_2017_gfdlmp_regional: - atmos_model_nml: - avg_max_length: 3600.0 - fv_core_nml: - <<: *gfs_2017_gfdlmp_fv_core - k_split: 2 - gfs_physics_nml: - <<: *gfs_2017_gfdlmp_phys - cplflx: False - effr_in: False - iopt_alb: 2 - iopt_btr: 1 - iopt_crs: 1 - iopt_dveg: 2 - iopt_frz: 1 - iopt_inf: 1 - iopt_rad: 1 - iopt_run: 1 - iopt_sfc: 1 - iopt_snf: 4 - iopt_stc: 1 - iopt_tbot: 2 - iopt_trs: 2 - lgfdlmprad: True - lheatstrg: False - lndp_type: 0 - lsm: 1 - n_var_lndp: 0 - nstf_name: [2, 0, 0, 0, 0] - oz_phys: False - oz_phys_2015: True - satmedmf: False - gfdl_cloud_microphysics_nml: - <<: *gfs_gfdl_cloud_mp - -FV3_GFS_v15p2: - fv_core_nml: &gfs_v15_fv_core - agrid_vel_rst: False - d2_bg_k1: 0.15 - d2_bg_k2: 0.02 - do_sat_adj: True - fv_debug: False - fv_sg_adj: 600 - k_split: 1 - kord_mt: 9 - kord_tm: -9 - kord_tr: 9 - kord_wz: 9 - n_split: 8 - n_sponge: 30 - nord_zs_filter: null - nudge_qv: True - range_warn: False - rf_cutoff: 750.0 - rf_fast: False - gfdl_cloud_microphysics_nml: - <<: *gfs_gfdl_cloud_mp - sedi_transport: True - tau_l2v: 225.0 - tau_v2l: 150.0 - gfs_physics_nml: &gfs_v15_gfs_physics - bl_mynn_edmf: null - bl_mynn_edmf_mom: null - bl_mynn_tkeadvect: null - cnvcld: True - cnvgwd: True - cplflx: null - do_myjpbl: False - do_myjsfc: False - do_mynnedmf: null - do_mynnsfclay: null - do_tofd: False - do_ugwp: False - do_ysu: False - fhcyc: 0.0 - fhlwr: 3600.0 - fhswr: 3600.0 - hybedmf: True - iau_delthrs: null - iaufhrs: null - imfdeepcnv: 2 - imfshalcnv: 2 - imp_physics: 11 - icloud_bl: null - iopt_alb: 2 - iopt_btr: 1 - iopt_crs: 1 - iopt_dveg: 2 - iopt_frz: 1 - iopt_inf: 1 - iopt_rad: 1 - iopt_run: 1 - iopt_sfc: 1 - iopt_snf: 4 - iopt_stc: 1 - iopt_tbot: 2 - iopt_trs: 2 - ldiag_ugwp: False - lgfdlmprad: True - lradar: null - lsm: 1 - lsoil: null - lsoil_lsm: null - ltaerosol: null - shal_cnv: True - shinhong: False - ttendlim: null - xkzm_h: 1.0 - xkzm_m: 1.0 - xkzminv: 0.3 - namsfc: - landice: True - ldebug: False - surf_map_nml: null - -FV3_GFS_v15_thompson_mynn_lam3km: - atmos_model_nml: - avg_max_length: 3600.0 - fv_core_nml: - agrid_vel_rst: True - full_zs_filter: null - n_sponge: 9 - npz_type: '' - rf_fast: False - sg_cutoff: 10000.0 - vtdm4: 0.02 - gfs_physics_nml: - avg_max_length: 3600.0 - cdmbgwd: [0.88, 0.04] - debug: True - do_deep: False - do_gsl_drag_ls_bl: False - do_gsl_drag_ss: True - do_gsl_drag_tofd: True - do_mynnsfclay: True - do_tofd: False - do_ugwp: False - do_ugwp_v0: False - do_ugwp_v0_nst_only: False - do_ugwp_v0_orog_only: False - fhswr: 900.0 - fhlwr: 900.0 - gwd_opt: 2 - iaer: 1011 - iccn: 2 - icliq_sw: 2 - imfdeepcnv: 2 - imfshalcnv: 2 - iopt_alb: 2 - iopt_btr: 1 - iopt_crs: 1 - iopt_dveg: 2 - iopt_frz: 1 - iopt_inf: 1 - iopt_rad: 1 - iopt_run: 1 - iopt_sfc: 1 - iopt_snf: 4 - iopt_stc: 1 - iopt_tbot: 2 - iopt_trs: null - iovr: 3 - ldiag_ugwp: False - lgfdlmprad: False - lsm: 1 - lsoil: null - lsoil_lsm: null - ltaerosol: False - print_diff_pgr: True - sfclay_compute_flux: null - xkzminv: 0.3 - xkzm_m: 1.0 - xkzm_h: 1.0 - surf_map_nml: null - -FV3_GFS_v16: - cires_ugwp_nml: - launch_level: 27 - fv_core_nml: - <<: *gfs_v15_fv_core 
- agrid_vel_rst: False - d2_bg_k1: 0.2 - d2_bg_k2: 0.0 - delt_max: 0.002 - dz_min: 6 - fv_sg_adj: 450 - hord_dp: -5 - hord_mt: 5 - hord_tm: 5 - hord_vt: 5 - k_split: 6 - make_nh: False - n_split: 6 - n_sponge: 10 - na_init: 0 - nudge_dz: False - res_latlon_dynamics: '' - rf_fast: null - tau: 10.0 - gfdl_cloud_microphysics_nml: - <<: *gfs_gfdl_cloud_mp - mp_time: 150.0 - reiflag: 2 - sedi_transport: True - tau_l2v: 225.0 - tau_v2l: 150.0 - gfs_physics_nml: - <<: *gfs_v15_gfs_physics - cdmbgwd: [4.0, 0.15, 1.0, 1.0] - do_myjpbl: null - do_myjsfc: null - do_tofd: True - do_ysu: null - hybedmf: False - iaer: 5111 - icliq_sw: 2 - iopt_dveg: 1 - iovr: 3 - isatmedmf: 1 - lgfdlmprad: True - lheatstrg: True - lndp_type: null - lsoil: 4 - n_var_lndp: null - prautco: [0.00015, 0.00015] - psautco: [0.0008, 0.0005] - satmedmf: True - shinhong: null - xkzminv: null - xkzm_m: null - xkzm_h: null - mpp_io_nml: - deflate_level: 1 - shuffle: 1 - namsfc: - landice: True - ldebug: False - surf_map_nml: null - -FV3_GFS_v17_p8: - cires_ugwp_nml: - launch_level: 27 - fv_core_nml: - <<: *gfs_v15_fv_core - agrid_vel_rst: False - d2_bg_k1: 0.2 - d2_bg_k2: 0.0 - dnats: 0 - do_sat_adj: False - fv_sg_adj: 450 - hord_dp: -5 - hord_mt: 5 - hord_tm: 5 - hord_tr: 8 - hord_vt: 5 - k_split: 6 - make_nh: True - n_split: 6 - n_sponge: 10 - na_init: 1 - nord: 1 - nudge_dz: False - res_latlon_dynamics: '' - rf_fast: null - tau: 10.0 - gfs_physics_nml: - cdmbgwd: [4.0, 0.05, 1.0, 1.0] - cnvcld: True - cnvgwd: True - decfl: 10 - do_deep: True - do_gsl_drag_ls_bl: False - do_gsl_drag_ss: True - do_gsl_drag_tofd: False - do_mynnedmf: False - do_mynnsfclay: False - do_tofd: False - do_ugwp: False - do_ugwp_v0: True - do_ugwp_v0_orog_only: False - do_ugwp_v0_nst_only: False - do_ugwp_v1: False - do_ugwp_v1_orog_only: False - dt_inner: 150.0 - fhlwr: 1200.0 - fhswr: 1200.0 - frac_grid: False - gwd_opt: 2 - iaer: 1011 - ialb: 2 - icliq_sw: 2 - iems: 2 - imfdeepcnv: 2 - imfshalcnv: 2 - iopt_alb: 1 - iopt_btr: 1 - iopt_crs: 2 - iopt_dveg: 4 - iopt_frz: 1 - iopt_inf: 1 - iopt_rad: 3 - iopt_run: 1 - iopt_sfc: 3 - iopt_snf: 4 - iopt_stc: 3 - iopt_tbot: 2 - iovr: 3 - isatmedmf: 1 - ldiag_ugwp: False - lseaspray: True - lgfdlmprad: False - lheatstrg: False - lradar: False - lsm: 2 - lsoil_lsm: 4 - ltaerosol: False - min_lakeice: 0.15 - min_seaice: 0.15 - qdiag3d: False - ras: False - satmedmf: True - sedi_semi: True - shal_cnv: True - mpp_io_nml: - deflate_level: 1 - shuffle: 1 - surf_map_nml: null diff --git a/parm/diag_table.FV3_GFS_v15_thompson_mynn_lam3km b/parm/diag_table.FV3_GFS_v15_thompson_mynn_lam3km index 6dc1f4f140..6d8e4fa412 100644 --- a/parm/diag_table.FV3_GFS_v15_thompson_mynn_lam3km +++ b/parm/diag_table.FV3_GFS_v15_thompson_mynn_lam3km @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} #output files "grid_spec", -1, "months", 1, "days", "time" diff --git a/parm/diag_table.FV3_GFS_v15p2 b/parm/diag_table.FV3_GFS_v15p2 index 6a5c63e3f8..da2123c0e1 100755 --- a/parm/diag_table.FV3_GFS_v15p2 +++ b/parm/diag_table.FV3_GFS_v15p2 @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" 
"atmos_4xdaily", 6, "hours", 1, "days", "time" diff --git a/parm/diag_table.FV3_GFS_v16 b/parm/diag_table.FV3_GFS_v16 index 6a5c63e3f8..da2123c0e1 100755 --- a/parm/diag_table.FV3_GFS_v16 +++ b/parm/diag_table.FV3_GFS_v16 @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" "atmos_4xdaily", 6, "hours", 1, "days", "time" diff --git a/parm/diag_table.FV3_GFS_v17_p8 b/parm/diag_table.FV3_GFS_v17_p8 index 9b03f316a7..9d6bbb6dff 100644 --- a/parm/diag_table.FV3_GFS_v17_p8 +++ b/parm/diag_table.FV3_GFS_v17_p8 @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} #output files "grid_spec", -1, "months", 1, "days", "time" diff --git a/parm/diag_table.FV3_HRRR b/parm/diag_table.FV3_HRRR index 893ca25a2b..fa77e66d06 100755 --- a/parm/diag_table.FV3_HRRR +++ b/parm/diag_table.FV3_HRRR @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" "atmos_static", -1, "hours", 1, "hours", "time" diff --git a/parm/diag_table.FV3_RAP b/parm/diag_table.FV3_RAP index 43bce10ab0..f1b49fabdd 100644 --- a/parm/diag_table.FV3_RAP +++ b/parm/diag_table.FV3_RAP @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" "atmos_4xdaily", 6, "hours", 1, "days", "time" diff --git a/parm/diag_table.FV3_RRFS_v1beta b/parm/diag_table.FV3_RRFS_v1beta index 30bf673ef7..fed189c689 100755 --- a/parm/diag_table.FV3_RRFS_v1beta +++ b/parm/diag_table.FV3_RRFS_v1beta @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" "atmos_static", -1, "hours", 1, "hours", "time" diff --git a/parm/diag_table.FV3_WoFS_v0 b/parm/diag_table.FV3_WoFS_v0 index e65dd80a7b..b2065484a8 100644 --- a/parm/diag_table.FV3_WoFS_v0 +++ b/parm/diag_table.FV3_WoFS_v0 @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" "atmos_static", -1, "hours", 1, "hours", "time" diff --git a/parm/diag_table_aqm.FV3_GFS_v15p2 b/parm/diag_table_aqm.FV3_GFS_v15p2 index a58303449f..db5de110a9 100644 --- a/parm/diag_table_aqm.FV3_GFS_v15p2 +++ b/parm/diag_table_aqm.FV3_GFS_v15p2 @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional 
+{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" #"atmos_4xdaily", 6, "hours", 1, "days", "time" diff --git a/parm/diag_table_aqm.FV3_GFS_v16 b/parm/diag_table_aqm.FV3_GFS_v16 index 1513e8ea74..74460fbb5e 100644 --- a/parm/diag_table_aqm.FV3_GFS_v16 +++ b/parm/diag_table_aqm.FV3_GFS_v16 @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} "grid_spec", -1, "months", 1, "days", "time" #"atmos_4xdaily", 6, "hours", 1, "days", "time" diff --git a/parm/diag_table_aqm.FV3_GFS_v17_p8 b/parm/diag_table_aqm.FV3_GFS_v17_p8 index 6aa4ceb34e..55fb07987f 100644 --- a/parm/diag_table_aqm.FV3_GFS_v17_p8 +++ b/parm/diag_table_aqm.FV3_GFS_v17_p8 @@ -1,5 +1,5 @@ -{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional -{{ starttime.strftime("%Y %m %d %H %M %S") }} +{{ cycle.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional +{{ cycle.strftime("%Y %m %d %H %M %S") }} #output files "grid_spec", -1, "months", 1, "days", "time" diff --git a/parm/fixed_files_mapping.yaml b/parm/fixed_files_mapping.yaml index 49d3191de5..3be45e0ce4 100644 --- a/parm/fixed_files_mapping.yaml +++ b/parm/fixed_files_mapping.yaml @@ -2,200 +2,35 @@ fixed_files: # #----------------------------------------------------------------------- # - # FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: - # This array is used to set some of the namelist variables in the forecast - # model's namelist file that represent the relative or absolute paths of - # various fixed files (the first column of the array, where columns are - # delineated by the pipe symbol "|") to the full paths to surface climatology - # files (on the native FV3-LAM grid) in the FIXlam directory derived from - # the corresponding surface climatology fields (the second column of the - # array). + # These are the names of the fields generated by the make_sfc_climo + # task. # #----------------------------------------------------------------------- # - FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: [ - "FNALBC | snowfree_albedo", - "FNALBC2 | facsf", - "FNTG3C | substrate_temperature", - "FNVEGC | vegetation_greenness", - "FNVETC | vegetation_type", - "FNSOTC | soil_type", - "FNVMNC | vegetation_greenness", - "FNVMXC | vegetation_greenness", - "FNSLPC | slope_type", - "FNABSC | maximum_snow_albedo" - ] - - # - #----------------------------------------------------------------------- - # - # Set the array parameter containing the names of all the fields that the - # TN_MAKE_SFC_CLIMO task generates on the native FV3-LAM grid. - # - #----------------------------------------------------------------------- - # - SFC_CLIMO_FIELDS: [ - "facsf", - "maximum_snow_albedo", - "slope_type", - "snowfree_albedo", - "soil_type", - "substrate_temperature", - "vegetation_greenness", - "vegetation_type", - ] - - # - #----------------------------------------------------------------------- - # - # FNGLAC, ..., FNMSKH: - # Names of (some of the) global data files that are assumed to exist in - # a system directory specified (this directory is machine-dependent; - # the experiment generation scripts will set it and store it in the - # variable FIXgsm). These file names also appear directly in the forecast - # model's input namelist file. 
- # - #----------------------------------------------------------------------- - # - FNGLAC: &FNGLAC "global_glacier.2x2.grb" - FNMXIC: &FNMXIC "global_maxice.2x2.grb" - FNTSFC: &FNTSFC "RTGSST.1982.2012.monthly.clim.grb" - FNSNOC: &FNSNOC "global_snoclim.1.875.grb" - FNZORC: &FNZORC "igbp" - FNAISC: &FNAISC "CFSR.SEAICE.1982.2012.monthly.clim.grb" - FNSMCC: &FNSMCC "global_soilmgldas.t126.384.190.grb" - FNMSKH: &FNMSKH "seaice_newland.grb" - # - #----------------------------------------------------------------------- - # - # FIXgsm_FILES_TO_COPY_TO_FIXam: - # If not running in NCO mode, this array contains the names of the files - # to copy from the FIXgsm system directory to the FIXam directory under - # the experiment directory. Note that the last element has a dummy value. - # This last element will get reset by the workflow generation scripts to - # the name of the ozone production/loss file to copy from FIXgsm. The - # name of this file depends on the ozone parameterization being used, - # and that in turn depends on the CCPP physics suite specified for the - # experiment. Thus, the CCPP physics suite XML must first be read in to - # determine the ozone parameterizaton and then the name of the ozone - # production/loss file. These steps are carried out elsewhere (in one - # of the workflow generation scripts/functions). - # - #----------------------------------------------------------------------- - # - FIXgsm_FILES_TO_COPY_TO_FIXam: [ - *FNGLAC, - *FNMXIC, - *FNTSFC, - *FNSNOC, - *FNAISC, - *FNSMCC, - *FNMSKH, - "global_climaeropac_global.txt", - "fix_co2_proj/global_co2historicaldata_2010.txt", - "fix_co2_proj/global_co2historicaldata_2011.txt", - "fix_co2_proj/global_co2historicaldata_2012.txt", - "fix_co2_proj/global_co2historicaldata_2013.txt", - "fix_co2_proj/global_co2historicaldata_2014.txt", - "fix_co2_proj/global_co2historicaldata_2015.txt", - "fix_co2_proj/global_co2historicaldata_2016.txt", - "fix_co2_proj/global_co2historicaldata_2017.txt", - "fix_co2_proj/global_co2historicaldata_2018.txt", - "fix_co2_proj/global_co2historicaldata_2019.txt", - "fix_co2_proj/global_co2historicaldata_2020.txt", - "fix_co2_proj/global_co2historicaldata_2021.txt", - "global_co2historicaldata_glob.txt", - "co2monthlycyc.txt", - "global_h2o_pltc.f77", - "global_hyblev.l65.txt", - "global_zorclim.1x1.grb", - "global_sfc_emissivity_idx.txt", - "global_tg3clim.2.6x1.5.grb", - "global_solarconstant_noaa_an.txt", - "global_albedo4.1x1.grb", - "geo_em.d01.lat-lon.2.5m.HGT_M.nc", - "HGT.Beljaars_filtered.lat-lon.30s_res.nc", - "replace_with_FIXgsm_ozone_prodloss_filename" - ] - # - #----------------------------------------------------------------------- - # - # FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING: - # This array is used to set some of the namelist variables in the forecast - # model's namelist file that represent the relative or absolute paths of - # various fixed files (the first column of the array, where columns are - # delineated by the pipe symbol "|") to the full paths to these files in - # the FIXam directory derived from the corresponding workflow variables - # containing file names (the second column of the array). 
- # - #----------------------------------------------------------------------- - # - FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING: [ - !join_str ["FNGLAC | ",*FNGLAC], - !join_str ["FNMXIC | ",*FNMXIC], - !join_str ["FNTSFC | ",*FNTSFC], - !join_str ["FNSNOC | ",*FNSNOC], - !join_str ["FNAISC | ",*FNAISC], - !join_str ["FNSMCC | ",*FNSMCC], - !join_str ["FNMSKH | ",*FNMSKH] - ] - - # - #----------------------------------------------------------------------- - # - # CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING: - # This array specifies the mapping to use between the symlinks that need - # to be created in each cycle directory (these are the "files" that FV3 - # looks for) and their targets in the FIXam directory. The first column - # of the array specifies the symlink to be created, and the second column - # specifies its target file in FIXam (where columns are delineated by the - # pipe symbol "|"). - # - #----------------------------------------------------------------------- - # - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING: [ - "aerosol.dat | global_climaeropac_global.txt", - "co2historicaldata_2010.txt | fix_co2_proj/global_co2historicaldata_2010.txt", - "co2historicaldata_2011.txt | fix_co2_proj/global_co2historicaldata_2011.txt", - "co2historicaldata_2012.txt | fix_co2_proj/global_co2historicaldata_2012.txt", - "co2historicaldata_2013.txt | fix_co2_proj/global_co2historicaldata_2013.txt", - "co2historicaldata_2014.txt | fix_co2_proj/global_co2historicaldata_2014.txt", - "co2historicaldata_2015.txt | fix_co2_proj/global_co2historicaldata_2015.txt", - "co2historicaldata_2016.txt | fix_co2_proj/global_co2historicaldata_2016.txt", - "co2historicaldata_2017.txt | fix_co2_proj/global_co2historicaldata_2017.txt", - "co2historicaldata_2018.txt | fix_co2_proj/global_co2historicaldata_2018.txt", - "co2historicaldata_2019.txt | fix_co2_proj/global_co2historicaldata_2019.txt", - "co2historicaldata_2020.txt | fix_co2_proj/global_co2historicaldata_2020.txt", - "co2historicaldata_2021.txt | fix_co2_proj/global_co2historicaldata_2021.txt", - "co2historicaldata_glob.txt | global_co2historicaldata_glob.txt", - "co2monthlycyc.txt | co2monthlycyc.txt", - "global_h2oprdlos.f77 | global_h2o_pltc.f77", - "global_albedo4.1x1.grb | global_albedo4.1x1.grb", - "global_zorclim.1x1.grb | global_zorclim.1x1.grb", - "global_tg3clim.2.6x1.5.grb | global_tg3clim.2.6x1.5.grb", - "sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt", - "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt", - "global_o3prdlos.f77 | ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" - ] + SFC_CLIMO_FIELDS: + - facsf + - maximum_snow_albedo + - slope_type + - snowfree_albedo + - soil_type + - substrate_temperature + - vegetation_greenness + - vegetation_type # #----------------------------------------------------------------------- # # THOMPSON_FIX_FILES - # Thompson microphysics requires additional fix files at the run_fcst step, - # this array is appended to FIXgsm_FILES_TO_COPY_TO_FIXam at the workflow - # generation step. Since all these file names are the same when linked, - # appending CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING is handled in the - # generate_FV3LAM_wflow.py script. + # Thompson microphysics requires additional fix files at the run_fcst + # step. Files named here are added to the fv3.files_to_link section at + # configuration time. They can be found in the FIXam directory. 
# #----------------------------------------------------------------------- # - THOMPSON_FIX_FILES: [ - "CCN_ACTIVATE.BIN", - "freezeH2O.dat", - "qr_acr_qg.dat", - "qr_acr_qs.dat", - "qr_acr_qgV2.dat", - "qr_acr_qsV2.dat", - ] - + THOMPSON_FIX_FILES: + - CCN_ACTIVATE.BIN + - freezeH2O.dat + - qr_acr_qg.dat + - qr_acr_qs.dat + - qr_acr_qgV2.dat + - qr_acr_qsV2.dat diff --git a/parm/input.nml.FV3 b/parm/input.nml.FV3 index bd6244d2d9..18b01702e6 100644 --- a/parm/input.nml.FV3 +++ b/parm/input.nml.FV3 @@ -197,15 +197,6 @@ interp_method = 'conserve_great_circle' / -&nam_sfcperts -/ - -&nam_sppperts -/ - -&nam_stochy -/ - &namsfc fabsl = 99999 faisl = 99999 diff --git a/parm/model_configure b/parm/model_configure deleted file mode 100644 index d22adf3f3a..0000000000 --- a/parm/model_configure +++ /dev/null @@ -1,117 +0,0 @@ -start_year: {{ start_year }} -start_month: {{ start_month }} -start_day: {{ start_day }} -start_hour: {{ start_hour }} -start_minute: 0 -start_second: 0 -nhours_fcst: {{ nhours_fcst }} -fhrot: {{ fhrot }} -RUN_CONTINUE: .false. -ENS_SPS: .false. -dt_atmos: {{ dt_atmos }} -calendar: 'julian' -memuse_verbose: .false. -restart_interval: {{ restart_interval }} -output_1st_tstep_rst: .false. -write_dopost: {{ write_dopost }} -ideflate: 0 -nbits: 0 -ichunk2d: -1 -jchunk2d: -1 -ichunk3d: -1 -jchunk3d: -1 -kchunk3d: -1 -itasks: {{ itasks }} -quilting: {{ quilting }} -{% if quilting %} -# -# Write-component (quilting) computational parameters. -# -write_groups: {{ write_groups }} -write_tasks_per_group: {{ write_tasks_per_group }} -num_files: 2 -filename_base: 'dyn' 'phy' -output_file: 'netcdf' 'netcdf' -# -# Write-component output frequency parameter definitions: -# -# output_fh: Output frequency in hours. -# nsout: Output frequency in time steps (positive values override "output_fh"). -# -output_fh: {{ output_fh }} -1 -nsout: {{ nsout }} -# -# Coordinate system used by the output grid. -# -output_grid: '{{ output_grid }}' -# -# Parameter definitions for an output grid of type "{{ output_grid }}": -# - {%- if output_grid == "lambert_conformal" %} -# cen_lon: Longitude of center of grid (degrees). -# cen_lat: Latitude of center of grid (degrees). -# stdlat1: Latitude of first standard parallel (degrees). -# stdlat2: Latitude of second standard parallel (degrees). -# nx: Number of grid cells along x-axis in Lambert conformal (x,y) plane. -# ny: Number of grid cells along y-axis in Lambert conformal (x,y) plane. -# lon1: Longitude of center of grid cell at bottom-left corner of grid (degrees). -# lat1: Latitude of center of grid cell at bottom-left corner of grid (degrees). -# dx: Grid cell size in x direction (meters). -# dy: Grid cell size in y direction (meters). -# - {%- elif output_grid == "regional_latlon" %} -# cen_lon: Longitude of center of grid (degrees). -# cen_lat: Latitude of center of grid (degrees). -# lon1: Longitude of center of lower-left (southwest) grid cell (degrees). -# lat1: Latitude of center of lower-left (southwest) grid cell (degrees). -# lon2: Longitude of center of upper-right (northeast) grid cell (degrees). -# lat2: Latitude of center of upper-right (northeast) grid cell (degrees). -# dlon: Longitudinal grid size (degrees). -# dlat: Latitudinal grid size (degrees). -# - {%- elif output_grid == "rotated_latlon" %} -# cen_lon: Longitude of center of grid, expressed in the NON-ROTATED latlon coordinate -# system (degrees). This is also the longitude of the point at which the -# equator and prime meridian of the ROTATED coordinate system intersect (i.e. 
-# the point at which the longitude and latitude in the ROTATED latlon -# coordinate system are both 0). -# cen_lat: Latitude of center of grid, expressed in the NON-ROTATED latlon coordinate -# system (degrees). This is also the latitude of the point at which the -# equator and prime meridian of the ROTATED coordinate system intersect (i.e. -# the point at which the longitude and latitude in the ROTATED latlon -# coordinate system are both 0). -# lon1: Longitude of center of lower-left grid cell, expressed in the ROTATED latlon -# coordinate system (degrees). -# lat1: Latitude of center of lower-left grid cell, expressed in the ROTATED latlon -# coordinate system (degrees). -# lon2: Longitude of center of upper-right grid cell, expressed in the ROTATED latlon -# coordinate system (degrees). -# lat2: Latitude of center of upper-right grid cell, expressed in the ROTATED latlon -# coordinate system (degrees). -# dlon: Longitudinal grid size in the ROTATED latlon coordinate system (degrees). -# dlat: Latitudinal grid size in the ROTATED latlon coordinate system (degrees). -# - {%- endif %} - {%- if output_grid == "lambert_conformal" %} -cen_lon: {{ cen_lon }} -cen_lat: {{ cen_lat }} -stdlat1: {{ stdlat1 }} -stdlat2: {{ stdlat2 }} -nx: {{ nx }} -ny: {{ ny }} -lon1: {{ lon1 }} -lat1: {{ lat1 }} -dx: {{ dx }} -dy: {{ dy }} - {%- elif (output_grid == "regional_latlon") or (output_grid == "rotated_latlon") %} -cen_lon: {{ cen_lon }} -cen_lat: {{ cen_lat }} -lon1: {{ lon1 }} -lat1: {{ lat1 }} -lon2: {{ lon2 }} -lat2: {{ lat2 }} -dlon: {{ dlon }} -dlat: {{ dlat }} - {%- endif %} -{%- endif %} - diff --git a/parm/ufs.configure b/parm/ufs.configure index 48d2a66e8a..5a74ad6701 100644 --- a/parm/ufs.configure +++ b/parm/ufs.configure @@ -30,6 +30,7 @@ AQM_petlist_bounds: 0 {{ aqm_pe_member01_m1 }} AQM_attributes:: Verbosity = 0 Diagnostic = 0 + ResourceFile = aqm.yaml :: # Run Sequence # diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index 99ed689666..dabf22355e 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -104,7 +104,7 @@ metatask_run_ensemble: <<: *default_task command: cyclestr: - value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/chgres_cube.py + value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE | lower }} && &SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_ics @@ -112,13 +112,6 @@ metatask_run_ensemble: join: cyclestr: value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - envars: - <<: *default_vars - SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' - nprocs: !int '{{ task_make_ics.chgres_cube.execution.batchargs.nodes * task_make_ics.chgres_cube.execution.batchargs.tasks_per_node }}' - join: - cyclestr: - value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nodes: '{{ task_make_ics.chgres_cube.execution.batchargs.nodes }}:ppn={{ task_make_ics.chgres_cube.execution.batchargs.tasks_per_node }}' walltime: '{{ task_make_ics.chgres_cube.execution.batchargs.walltime }}' dependency: @@ -155,7 +148,7 @@ metatask_run_ensemble: <<: *default_task command: cyclestr: - value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/chgres_cube.py + value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE | lower }} && &SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_lbcs @@ -183,18 +176,20 @@ metatask_run_ensemble: task_run_fcst_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK; "run_fcst" 
"&JOBSdir;/JREGIONAL_RUN_FCST"' - envars: - <<: *default_vars - SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' - nprocs: !int '{{ task_run_fcst.PE_MEMBER01 }}' + command: + cyclestr: + value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE | lower }} && &SCRIPTSdir;/run_fcst.py + -c &GLOBAL_VAR_DEFNS_FP; + --cycle @Y-@m-@dT@H:@M:@S + --key-path task_run_fcst + --mem #mem#' join: cyclestr: value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - partition: '{{ "&PARTITION_FCST;" if platform.get("PARTITION_FCST") else "None" }}' + nodes: '{{ task_run_fcst.fv3.execution.batchargs.nodes // 1 }}:ppn={{ task_run_fcst.fv3.execution.batchargs.tasks_per_node // 1 }}' + partition: '{{ "&PARTITION_FCST;" if platform.get("PARTITION_FCST") }}' queue: '&QUEUE_FCST;' - nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' - walltime: '{{ task_run_fcst.walltime }}' + walltime: '{{ task_run_fcst.fv3.execution.batchargs.walltime }}' dependency: and: taskdep_make_ics: diff --git a/parm/wflow/post.yaml b/parm/wflow/post.yaml index 318f288852..0f4c8c0f2c 100644 --- a/parm/wflow/post.yaml +++ b/parm/wflow/post.yaml @@ -12,7 +12,7 @@ metatask_run_ens_post: maxtries: '2' command: cyclestr: - value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/upp.py + value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE | lower }} && &SCRIPTSdir;/upp.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --leadtime #fhr#:00:00 diff --git a/parm/wflow/prep.yaml b/parm/wflow/prep.yaml index cf6b60fd2a..b87e121267 100644 --- a/parm/wflow/prep.yaml +++ b/parm/wflow/prep.yaml @@ -28,7 +28,7 @@ task_make_grid: task_make_orog: <<: *default_task - command: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/make_orog.py + command: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE | lower }} && &SCRIPTSdir;/make_orog.py -c &GLOBAL_VAR_DEFNS_FP; --key-path task_make_orog' join: @@ -52,7 +52,7 @@ task_make_orog: task_make_sfc_climo: <<: *default_task - command: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/make_sfc_climo.py + command: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE | lower }} && &SCRIPTSdir;/make_sfc_climo.py -c &GLOBAL_VAR_DEFNS_FP; --key-path task_make_sfc_climo' join: diff --git a/scripts/exregional_integration_test.py b/scripts/exregional_integration_test.py index 69f84795a5..980b760bb4 100755 --- a/scripts/exregional_integration_test.py +++ b/scripts/exregional_integration_test.py @@ -33,6 +33,8 @@ import unittest from pathlib import Path +from uwtools.api.config import get_yaml_config + # --------------Define some functions ------------------# @@ -86,9 +88,10 @@ def setup_logging(debug=False): ) parser.add_argument( "--fcst_inc", - default="1", + default=1, help="Increment of forecast in hours.", required=False, + type=int, ) parser.add_argument( "--debug", @@ -111,12 +114,8 @@ def setup_logging(debug=False): sys.exit(1) # Loop through model_configure file to find the netcdf base names - with open(MODEL_CONFIGURE_FP, "r", encoding="utf-8") as f: - for line in f: - if line.startswith("filename_base"): - filename_base_1 = line.split("'")[1] - filename_base_2 = line.split("'")[3] - break + model_configure = get_yaml_config(MODEL_CONFIGURE_FP) + filename_base_1, filename_base_2 = model_configure["filename_base"].split() # Create list of expected filenames from the experiment filename_list = [] @@ -128,6 +127,10 @@ def setup_logging(debug=False): filename_list.append(filename_1) 
filename_list.append(filename_2) + if not filename_list: + logging.error("No files were found to test") + sys.exit(1) + # Call unittest class TestExptFiles.fcst_dir = args.fcst_dir TestExptFiles.filename_list = filename_list diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 3d72d0c06d..af1c131506 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -57,7 +57,6 @@ # RUN_CMD_SERIAL # workflow: -# DOT_OR_USCORE # GRID_GEN_METHOD # RES_IN_FIXLAM_FILENAMES # RGNL_GRID_NML_FN @@ -114,6 +113,7 @@ sections=( for sect in ${sections[*]} ; do source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} done +DOT_OR_USCORE="_" # #----------------------------------------------------------------------- # @@ -724,30 +724,6 @@ python3 $USHdir/link_fix.py \ print_err_msg_exit "\ Call to function to create symlinks to the various grid and mosaic files failed." -# -#----------------------------------------------------------------------- -# -# Call a function (set_fv3nml_sfc_climo_filenames) to set the values of -# those variables in the forecast model's namelist file that specify the -# paths to the surface climatology files. These files will either already -# be avaialable in a user-specified directory (SFC_CLIMO_DIR) or will be -# generated by the TN_MAKE_SFC_CLIMO task. They (or symlinks to them) -# will be placed (or wll already exist) in the FIXlam directory. -# -#----------------------------------------------------------------------- -# -python3 $USHdir/set_fv3nml_sfc_climo_filenames.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - || print_err_msg_exit "\ -Call to function to set surface climatology file names in the FV3 namelist -file failed." -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# print_info_msg " ======================================================================== Grid files with various halo widths generated successfully!!! diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh deleted file mode 100755 index ce6032bae9..0000000000 --- a/scripts/exregional_run_fcst.sh +++ /dev/null @@ -1,952 +0,0 @@ -#!/usr/bin/env bash - - -# -#----------------------------------------------------------------------- -# -# This ex-script is responsible for running the FV3 regional forecast. 
-# -# Run-time environment variables: -# -# CDATE -# COMIN -# COMOUT -# COMROOT -# DATA -# DBNROOT -# GLOBAL_VAR_DEFNS_FP -# INPUT_DATA -# NET -# PDY -# REDIRECT_OUT_ERR -# RUN -# SENDDBN -# SLASH_ENSMEM_SUBDIR -# -# Experiment variables -# -# user: -# MACHINE -# PARMdir -# RUN_ENVIR -# USHdir -# -# platform: -# PRE_TASK_CMDS -# RUN_CMD_FCST -# -# workflow: -# CCPP_PHYS_DIR -# CCPP_PHYS_SUITE -# COLDSTART -# CRES -# DATA_TABLE_FN -# DATA_TABLE_FP -# DATE_FIRST_CYCL -# DOT_OR_USCORE -# EXPTDIR -# FCST_LEN_CYCL -# FCST_LEN_HRS -# FIELD_DICT_FP -# FIELD_DICT_FN -# FIELD_TABLE_FN -# FIELD_TABLE_FP -# FIXam -# FIXclim -# FIXlam -# FV3_NML_FN -# FV3_NML_FP -# FV3_NML_STOCH_FP -# INCR_CYCL_FREQ -# PREDEF_GRID_NAME -# SYMLINK_FIX_FILES -# VERBOSE -# -# task_get_extrn_lbcs: -# LBC_SPEC_INTVL_HRS -# -# task_run_fcst: -# DO_FCST_RESTART -# DT_ATMOS -# FV3_EXEC_FP -# KMP_AFFINITY_RUN_FCST -# OMP_NUM_THREADS_RUN_FCST -# OMP_STACKSIZE_RUN_FCST -# PRINT_ESMF -# RESTART_INTERVAL -# USE_MERRA_CLIMO -# WRITE_DOPOST -# -# global: -# DO_ENSEMBLE -# DO_LSM_SPP -# DO_SHUM -# DO_SKEB -# DO_SPP -# DO_SPPT -# -# cpl_aqm_parm: -# AQM_RC_PRODUCT_FN -# CPL_AQM -# -# constants: -# NH0 -# NH3 -# NH4 -# TILE_RGNL -# -# fixed_files: -# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -sections=( - user - nco - platform - workflow - global - cpl_aqm_parm - constants - fixed_files - task_get_extrn_lbcs.envvars - task_run_fcst -) -for sect in ${sections[*]} ; do - source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} -done - -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that runs a forecast with FV3 for the -specified cycle. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set environment variables. 
-# -#----------------------------------------------------------------------- -# -export KMP_AFFINITY=${KMP_AFFINITY_RUN_FCST} -export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST} -export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST} -export MPI_TYPE_DEPTH=20 -export ESMF_RUNTIME_COMPLIANCECHECK=OFF:depth=4 -if [ $(boolify "${PRINT_ESMF}") = "TRUE" ]; then - export ESMF_RUNTIME_PROFILE=ON - export ESMF_RUNTIME_PROFILE_OUTPUT="SUMMARY" -fi -# -#----------------------------------------------------------------------- -# -# Load modules. -# -#----------------------------------------------------------------------- -# -eval ${PRE_TASK_CMDS} - -if [ -z "${RUN_CMD_FCST:-}" ] ; then - print_err_msg_exit "\ - Run command was not set in machine file. \ - Please set RUN_CMD_FCST for your platform" -else - print_info_msg "$VERBOSE" " - All executables will be submitted with command \'${RUN_CMD_FCST}\'." -fi - -if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then - cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) - CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) - FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} -fi - -# -#----------------------------------------------------------------------- -# -# Create links in the INPUT subdirectory of the current run directory to -# the grid and (filtered) orography files. -# -#----------------------------------------------------------------------- -# -print_info_msg "$VERBOSE" " -Creating links in the INPUT subdirectory of the current run directory to -the grid and (filtered) orography files ..." - -# Create links to fix files in the FIXlam directory. -cd ${DATA}/INPUT - -# -# For experiments in which the TN_MAKE_GRID task is run, we make the -# symlinks to the grid files relative because those files wlll be located -# within the experiment directory. This keeps the experiment directory -# more portable and the symlinks more readable. However, for experiments -# in which the TN_MAKE_GRID task is not run, pregenerated grid files will -# be used, and those will be located in an arbitrary directory (specified -# by the user) that is somwehere outside the experiment directory. Thus, -# in this case, there isn't really an advantage to using relative symlinks, -# so we use symlinks with absolute paths. -# -if [[ -d "${EXPTDIR}/grid" ]]; then - relative_link_flag="TRUE" -else - relative_link_flag="FALSE" -fi - -# Symlink to mosaic file with a completely different name. -#target="${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo${NH4}.nc" # Should this point to this halo4 file or a halo3 file??? -target="${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo${NH3}.nc" # Should this point to this halo4 file or a halo3 file??? -symlink="grid_spec.nc" -create_symlink_to_file $target $symlink ${relative_link_flag} - -# Symlink to halo-3 grid file with "halo3" stripped from name. -mosaic_fn="grid_spec.nc" -grid_fn=$( get_charvar_from_netcdf "${mosaic_fn}" "gridfiles" ) - -target="${FIXlam}/${grid_fn}" -symlink="${grid_fn}" -create_symlink_to_file $target $symlink ${relative_link_flag} - -# Symlink to halo-4 grid file with "${CRES}_" stripped from name. -# -# If this link is not created, then the code hangs with an error message -# like this: -# -# check netcdf status= 2 -# NetCDF error No such file or directory -# Stopped -# -# Note that even though the message says "Stopped", the task still con- -# sumes core-hours. 
-# -target="${FIXlam}/${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH4}.nc" -symlink="grid.tile${TILE_RGNL}.halo${NH4}.nc" -create_symlink_to_file $target $symlink ${relative_link_flag} - - -# -# As with the symlinks grid files above, when creating the symlinks to -# the orography files, use relative paths if running the TN_MAKE_OROG -# task and absolute paths otherwise. -# -if [ -d "${EXPTDIR}/orog" ]; then - relative_link_flag="TRUE" -else - relative_link_flag="FALSE" -fi - -# Symlink to halo-0 orography file with "${CRES}_" and "halo0" stripped from name. -target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH0}.nc" -symlink="oro_data.nc" -create_symlink_to_file $target $symlink ${relative_link_flag} -# -# Symlink to halo-4 orography file with "${CRES}_" stripped from name. -# -# If this link is not created, then the code hangs with an error message -# like this: -# -# check netcdf status= 2 -# NetCDF error No such file or directory -# Stopped -# -# Note that even though the message says "Stopped", the task still con- -# sumes core-hours. -# -target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc" -symlink="oro_data.tile${TILE_RGNL}.halo${NH4}.nc" -create_symlink_to_file $target $symlink ${relative_link_flag} -# -# If using the FV3_HRRR physics suite, there are two files (that contain -# statistics of the orography) that are needed by the gravity wave drag -# parameterization in that suite. Below, create symlinks to these files -# in the run directory. Note that the symlinks must have specific names -# that the FV3 model is hardcoded to recognize, and those are the names -# we use below. -# -suites=( "FV3_RAP" "FV3_HRRR" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" ) -if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then - file_ids=( "ss" "ls" ) - for file_id in "${file_ids[@]}"; do - target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data_${file_id}.tile${TILE_RGNL}.halo${NH0}.nc" - symlink="oro_data_${file_id}.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - done -fi -# -#----------------------------------------------------------------------- -# -# The FV3 model looks for the following files in the INPUT subdirectory -# of the run directory: -# -# gfs_data.nc -# sfc_data.nc -# gfs_bndy*.nc -# gfs_ctrl.nc -# -# Some of these files (gfs_ctrl.nc, gfs_bndy*.nc) already exist, but -# others do not. Thus, create links with these names to the appropriate -# files (in this case the initial condition and surface files only). -# -#----------------------------------------------------------------------- -# -print_info_msg "$VERBOSE" " -Creating links with names that FV3 looks for in the INPUT subdirectory -of the current run directory (DATA), where - DATA = \"${DATA}\" -..." - -cd ${DATA}/INPUT - -# -# The symlinks to be created point to files in the same directory (INPUT), -# so it's most straightforward to use relative paths. 
-# -relative_link_flag="FALSE" - -if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later - - target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" - symlink="gfs_data.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - - target="${COMIN}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" - symlink="sfc_data.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - - target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" - symlink="gfs_ctrl.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - - for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do - target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" - symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - done - target="${COMIN}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc" - symlink="NEXUS_Expt.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - - # create symlink to PT for point source in SRW-AQM - target="${COMIN}/${NET}.${cycle}${dot_ensmem}.PT.nc" - if [ -f ${target} ]; then - symlink="PT.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - fi - -else - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" - symlink="gfs_data.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" - symlink="sfc_data.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" - symlink="gfs_ctrl.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - - for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" - symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" - create_symlink_to_file $target $symlink ${relative_link_flag} - done -fi -# -#----------------------------------------------------------------------- -# -# Create links in the current run directory to fixed (i.e. static) files -# in the FIXam directory. These links have names that are set to the -# names of files that the forecast model expects to exist in the current -# working directory when the forecast model executable is called (and -# that is just the run directory). -# -#----------------------------------------------------------------------- -# -cd ${DATA} - -print_info_msg "$VERBOSE" " -Creating links in the current run directory (DATA) to fixed (i.e. -static) files in the FIXam directory: - FIXam = \"${FIXam}\" - DATA = \"${DATA}\"" -# -# For experiments that are run in "community" mode, the FIXam directory -# is an actual directory (i.e. not a symlink) located under the experiment -# directory containing actual files (i.e. not symlinks). In this case, -# we use relative paths for the symlinks in order to keep the experiment -# directory more portable and the symlinks more readable. However, for -# experiments that are run in "nco" mode, the FIXam directory is a symlink -# under the experiment directory that points to an arbitrary (user specified) -# location outside the experiment directory. 
Thus, in this case, there -# isn't really an advantage to using relative symlinks, so we use symlinks -# with absolute paths. -# -if [ $(boolify "${SYMLINK_FIX_FILES}") = "FALSE" ]; then - relative_link_flag="TRUE" -else - relative_link_flag="FALSE" -fi - -regex_search="^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" -num_symlinks=${#CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[@]} -for (( i=0; i<${num_symlinks}; i++ )); do - - mapping="${CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[$i]}" - symlink=$( printf "%s\n" "$mapping" | \ - $SED -n -r -e "s/${regex_search}/\1/p" ) - target=$( printf "%s\n" "$mapping" | \ - $SED -n -r -e "s/${regex_search}/\2/p" ) - - symlink="${DATA}/$symlink" - target="$FIXam/$target" - create_symlink_to_file $target $symlink ${relative_link_flag} - -done -# -#----------------------------------------------------------------------- -# -# Create links in the current run directory to the MERRA2 aerosol -# climatology data files and lookup table for optics properties. -# -#----------------------------------------------------------------------- -# -if [ $(boolify "${USE_MERRA_CLIMO}") = "TRUE" ]; then - for f_nm_path in ${FIXclim}/*; do - f_nm=$( basename "${f_nm_path}" ) - pre_f="${f_nm%%.*}" - - if [ "${pre_f}" = "merra2" ]; then - mnth=$( printf "%s\n" "${f_nm}" | grep -o -P '(?<=2014.m).*(?=.nc)' ) - symlink="${DATA}/aeroclim.m${mnth}.nc" - else - symlink="${DATA}/${pre_f}.dat" - fi - target="${f_nm_path}" - create_symlink_to_file $target $symlink ${relative_link_flag} - done -fi -# -#----------------------------------------------------------------------- -# -# If running this cycle/ensemble member combination more than once (e.g. -# using rocotoboot), remove any time stamp file that may exist from the -# previous attempt. -# -#----------------------------------------------------------------------- -# -cd ${DATA} -rm -f time_stamp.out -# -#----------------------------------------------------------------------- -# -# Create links in the current run directory to cycle-independent (and -# ensemble-member-independent) model input files in the main experiment -# directory. -# -#----------------------------------------------------------------------- -# -print_info_msg "$VERBOSE" " -Creating links in the current run directory to cycle-independent model -input files in the main experiment directory..." -# -# For experiments that are run in "community" mode, the model input files -# to which the symlinks will point are under the experiment directory. -# Thus, in this case, we use relative paths for the symlinks in order to -# keep the experiment directory more portable and the symlinks more readable. -# However, for experiments that are run in "nco" mode, the experiment -# directory in which the model input files are located is in general -# completely different than the run directory in which the symlinks will -# be created. Thus, in this case, there isn't really an advantage to -# using relative symlinks, so we use symlinks with absolute paths. 
-# -if [ "${RUN_ENVIR}" != "nco" ]; then - relative_link_flag="TRUE" -else - relative_link_flag="FALSE" -fi - -create_symlink_to_file ${DATA_TABLE_FP} ${DATA}/${DATA_TABLE_FN} ${relative_link_flag} - -create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_link_flag} - -create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag} - -set -x -if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then - cycle="${PDY:0:4}-${PDY:4:2}-${PDY:6:2}T$cyc" - for task in control_file files_copied files_linked namelist_file ; do - uw upp $task -c ${GLOBAL_VAR_DEFNS_FP} --leadtime 999 --cycle $cycle --key-path task_run_post - done - mv $DATA/postprd/999/* . - rmdir $DATA/postprd/999 - # remove some unused (unrecognized) entries -(cat << EOF -model_inputs: - datestr: !remove - filename: !remove - filenameflux: !remove - grib: !remove - ioform: !remove -EOF -) | - uw config realize \ - -i itag \ - --input-format nml \ - -o itag \ - --output-format nml \ - --update-format yaml - - cp postxconfig-NT.txt postxconfig-NT_FH00.txt -fi - -# -#---------------------------------------------------------------------- -# -# NOAHMP table copied from CCPP physics directory into $DATA directory. -# This is a temporary solution that will need to be changed once NOAHMP -# is included as a submodule in the weather model. -# -#---------------------------------------------------------------------- -# - -cp ${CCPP_PHYS_DIR}/noahmptable.tbl . - -# -#----------------------------------------------------------------------- -# -# Choose namelist file to use -# -#----------------------------------------------------------------------- -# -STOCH="FALSE" -if ([ $(boolify "${DO_SPP}") = "TRUE" ] || \ - [ $(boolify "${DO_SPPT}") = "TRUE" ] || \ - [ $(boolify "${DO_SHUM}") = "TRUE" ] || \ - [ $(boolify "${DO_SKEB}") = "TRUE" ] || \ - [ $(boolify "${DO_LSM_SPP}") = "TRUE" ]); then - STOCH="TRUE" -fi -if [ "${STOCH}" = "TRUE" ]; then - cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} - else - ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} -fi - -# -#----------------------------------------------------------------------- -# -# Set stochastic physics seeds -# -#----------------------------------------------------------------------- -# -if ([ "$STOCH" == "TRUE" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]); then - python3 $USHdir/set_fv3nml_ens_stoch_seeds.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --cdate "$CDATE" || print_err_msg_exit "\ -Call to function to create the ensemble-based namelist for the current -cycle's (cdate) run directory (DATA) failed: - cdate = \"${CDATE}\" - DATA = \"${DATA}\"" -fi -# -#----------------------------------------------------------------------- -# -# Replace parameter values for air quality modeling using AQM_NA_13km -# in FV3 input.nml and model_configure. -# -#----------------------------------------------------------------------- -# -if [ $(boolify "${CPL_AQM}") = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then - python3 $USHdir/update_input_nml.py \ - --namelist "${DATA}/${FV3_NML_FN}" \ - --aqm_na_13km || print_err_msg_exit "\ -Call to function to update the FV3 input.nml file for air quality modeling -using AQM_NA_13km for the current cycle's (cdate) run directory (DATA) failed: - cdate = \"${CDATE}\" - DATA = \"${DATA}\"" -fi -# -#----------------------------------------------------------------------- -# -# Replace parameter values for restart in FV3 input.nml and model_configure. -# Add restart files to INPUT directory. 
-# -#----------------------------------------------------------------------- -# -flag_fcst_restart="FALSE" -if [ $(boolify "${DO_FCST_RESTART}") = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then - cp input.nml input.nml_orig - cp model_configure model_configure_orig - if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - cp aqm.rc aqm.rc_orig - fi - relative_link_flag="FALSE" - flag_fcst_restart="TRUE" - - # Update FV3 input.nml for restart - python3 $USHdir/update_input_nml.py \ - --namelist "${DATA}/${FV3_NML_FN}" \ - --restart - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to function to update the FV3 input.nml file for restart -for the current cycle's (cdate) run directory (DATA) failed: - cdate = \"${CDATE}\" - DATA = \"${DATA}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - # Check that restart files exist at restart_interval - file_ids=( "coupler.res" "fv_core.res.nc" "fv_core.res.tile1.nc" "fv_srf_wnd.res.tile1.nc" "fv_tracer.res.tile1.nc" "phy_data.nc" "sfc_data.nc" ) - num_file_ids=${#file_ids[*]} - IFS=' ' - read -a restart_hrs <<< "${RESTART_INTERVAL}" - num_restart_hrs=${#restart_hrs[*]} - - for (( ih_rst=${num_restart_hrs}-1; ih_rst>=0; ih_rst-- )); do - cdate_restart_hr=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC + ${restart_hrs[ih_rst]} hours" "+%Y%m%d%H" ) - rst_yyyymmdd="${cdate_restart_hr:0:8}" - rst_hh="${cdate_restart_hr:8:2}" - - num_rst_files=0 - for file_id in "${file_ids[@]}"; do - if [ -e "${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" ]; then - (( num_rst_files=num_rst_files+1 )) - fi - done - if [ "${num_rst_files}" = "${num_file_ids}" ]; then - FHROT="${restart_hrs[ih_rst]}" - break - fi - done - - # Create soft-link of restart files in INPUT directory - cd ${DATA}/INPUT - for file_id in "${file_ids[@]}"; do - rm "${file_id}" - target="${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" - symlink="${file_id}" - create_symlink_to_file $target $symlink ${relative_link_flag} - done - cd ${DATA} -fi -# -#----------------------------------------------------------------------- -# -# Setup air quality model cold/warm start -# -#----------------------------------------------------------------------- -# -if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - if [ $(boolify "${COLDSTART}") = "TRUE" ] && \ - [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && \ - [ $(boolify "${flag_fcst_restart}") = "FALSE" ]; then - init_concentrations="true" - else - init_concentrations="false" - fi -# -#----------------------------------------------------------------------- -# -# Call the function that creates the aqm.rc file within each -# cycle directory. -# -#----------------------------------------------------------------------- -# - python3 $USHdir/create_aqm_rc_file.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --cdate "$CDATE" \ - --run-dir "${DATA}" \ - --init_concentrations "${init_concentrations}" - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to function to create an aqm.rc file for the current -cycle's (cdate) run directory (DATA) failed: - cdate = \"${CDATE}\" - DATA = \"${DATA}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi -fi - -# -#----------------------------------------------------------------------- -# -# Call the function that creates the model configuration file within each -# cycle directory. 
-# -#----------------------------------------------------------------------- -# -python3 $USHdir/create_model_configure_file.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --cdate "$CDATE" \ - --fcst_len_hrs "${FCST_LEN_HRS}" \ - --fhrot "${FHROT}" \ - --run-dir "${DATA}" \ - --dt-atmos "${DT_ATMOS}" -export err=$? -if [ $err -ne 0 ]; then - message_txt="Call to function to create a model configuration file -for the current cycle's (cdate) run directory (DATA) failed: - cdate = \"${CDATE}\" - DATA = \"${DATA}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi -fi -# -#----------------------------------------------------------------------- -# -# Call the function that creates the diag_table file within each cycle -# directory. -# -#----------------------------------------------------------------------- -# -python3 $USHdir/create_diag_table_file.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run-dir "${DATA}" -export err=$? -if [ $err -ne 0 ]; then - message_txt="Call to function to create a diag table file for the current -cycle's (cdate) run directory (DATA) failed: - DATA = \"${DATA}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi -fi -# -#----------------------------------------------------------------------- -# -# Pre-generate symlink to forecast RESTART in DATA for early start of -# the next cycle -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - # create an intermediate symlink to RESTART - ln -sf "${DATA}/RESTART" "${COMIN}/RESTART" -fi -# -#----------------------------------------------------------------------- -# -# Call the function that creates the NEMS configuration file within each -# cycle directory. -# -#----------------------------------------------------------------------- -# -python3 $USHdir/create_ufs_configure_file.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run-dir "${DATA}" -export err=$? -if [ $err -ne 0 ]; then - message_txt="Call to function to create a NEMS configuration file for -the current cycle's (cdate) run directory (DATA) failed: - DATA = \"${DATA}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi -fi -# -#----------------------------------------------------------------------- -# -# Run the FV3-LAM model. Note that we have to launch the forecast from -# the current cycle's directory because the FV3 executable will look for -# input files in the current directory. Since those files have been -# staged in the cycle directory, the current directory must be the cycle -# directory (which it already is). -# -#----------------------------------------------------------------------- -# -PREP_STEP -eval ${RUN_CMD_FCST} ${FV3_EXEC_FP} ${REDIRECT_OUT_ERR} -export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run FV3-LAM forecast returned with nonzero exit code." - fi -fi -POST_STEP -# -#----------------------------------------------------------------------- -# -# Move RESTART directory to COMIN and create symlink in DATA only for -# NCO mode and when it is not empty. 
-# -# Move AQM output product file to COMOUT only for NCO mode in Online-CMAQ. -# Move dyn and phy files to COMIN only if run_post and write_dopost are off. -# -#----------------------------------------------------------------------- -# -if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - if [ "${RUN_ENVIR}" = "nco" ]; then - if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then - rm -rf "${COMIN}/RESTART" - fi - if [ "$(ls -A ${DATA}/RESTART)" ]; then - cp -Rp ${DATA}/RESTART ${COMIN} - fi - fi - - cp -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} - - fhr_ct=0 - fhr=0 - while [ $fhr -le ${FCST_LEN_HRS} ]; do - fhr_ct=$(printf "%03d" $fhr) - source_dyn="${DATA}/dynf${fhr_ct}.nc" - source_phy="${DATA}/phyf${fhr_ct}.nc" - target_dyn="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc" - target_phy="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc" - [ -f ${source_dyn} ] && cp -p ${source_dyn} ${target_dyn} - [ -f ${source_phy} ] && cp -p ${source_phy} ${target_phy} - (( fhr=fhr+1 )) - done -fi -# -#----------------------------------------------------------------------- -# -# If doing inline post, create the directory in which the post-processing -# output will be stored (postprd_dir). -# -#----------------------------------------------------------------------- -# -if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then - - yyyymmdd=${PDY} - hh=${cyc} - fmn="00" - - if [ "${RUN_ENVIR}" != "nco" ]; then - export COMOUT="${DATA}/postprd" - fi - mkdir -p "${COMOUT}" - - cd ${COMOUT} - - for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do - - if [ ${fhr:0:1} = "0" ]; then - fhr_d=${fhr:1:2} - else - fhr_d=${fhr} - fi - - post_output_domain_name=$(uw config realize -i $GLOBAL_VAR_DEFNS_FP --output-format yaml \ - --key-path task_run_post.post_output_domain_name) - post_time=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr_d} hours + ${fmn} minutes" "+%Y%m%d%H%M" ) - post_mn=${post_time:10:2} - post_mn_or_null="" - post_fn_suffix="GrbF${fhr_d}" - post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${post_output_domain_name}.grib2" - - if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - fids=( "cmaq" ) - else - fids=( "prslev" "natlev" ) - fi - - for fid in "${fids[@]}"; do - FID=$(echo_uppercase $fid) - post_orig_fn="${FID}.${post_fn_suffix}" - post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - - mv ${DATA}/${post_orig_fn} ${post_renamed_fn} - if [ $RUN_ENVIR != "nco" ]; then - basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) - symlink_suffix="_${basetime}f${fhr}${post_mn}" - create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE - fi - # DBN alert - if [ "$SENDDBN" = "TRUE" ]; then - $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} - fi - done - - if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc - mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc - fi - done - -fi -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -FV3 forecast completed successfully!!! 
- -Exiting script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" -========================================================================" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/scripts/run_fcst.py b/scripts/run_fcst.py new file mode 100755 index 0000000000..085ee5ed54 --- /dev/null +++ b/scripts/run_fcst.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python +""" +The run script for run_fcst. +""" + +import datetime as dt +import logging +import os +import sys +from argparse import ArgumentParser +from copy import deepcopy +from pathlib import Path + +from uwtools.api.config import get_yaml_config +from uwtools.api.fs import link as uwlink +from uwtools.api.fv3 import FV3 +from uwtools.api.logging import use_uwtools_logger +from uwtools.api.template import render +from uwtools.api.upp import UPP + + +def _walk_key_path(config, key_path): + """ + Navigate to the sub-config at the end of the path of given keys. + """ + keys = [] + pathstr = "" + for key in key_path: + keys.append(key) + pathstr = " -> ".join(keys) + try: + subconfig = config[key] + except KeyError: + logging.error(f"Bad config path: {pathstr}") + raise + if not isinstance(subconfig, dict): + logging.error(f"Value at {pathstr} must be a dictionary") + sys.exit(1) + config = subconfig + return config + + +def link_files(dest_dir, files): + """ + Link a given list of files to the destination directory using the same file names. + """ + for fpath in files: + path = Path(fpath) + linkname = dest_dir / path.name + if linkname.is_symlink(): + linkname.unlink() + logging.info(f"Linking {linkname} -> {path}") + linkname.symlink_to(path) + + +def parse_args(argv): + """ + Parse arguments for the script. + """ + parser = ArgumentParser( + description="Script that runs FV3 via uwtools API.", + ) + parser.add_argument( + "-c", + "--config-file", + metavar="PATH", + required=True, + help="Path to experiment config file.", + type=Path, + ) + parser.add_argument( + "--cycle", + help="The cycle in ISO8601 format (e.g. 2024-07-15T18).", + required=True, + type=dt.datetime.fromisoformat, + ) + parser.add_argument( + "--key-path", + help="Dot-separated path of keys leading through the config to the driver's YAML block.", + metavar="KEY[.KEY...]", + required=True, + type=lambda s: s.split("."), + ) + parser.add_argument( + "--member", + default="000", + help="The 3-digit ensemble member number.", + ) + return parser.parse_args(argv) + + +def run_fcst(config_file, cycle, key_path, member): + """ + Setup and run the FV3 Driver. 
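A short usage sketch of _walk_key_path defined above; the literal config here is illustrative. The function descends the experiment config one key at a time so that a failure can report the full path walked so far.

# Illustrative only.
cfg = {"task_run_fcst": {"fv3": {"rundir": "/path/to/run"}}}
fv3_block = _walk_key_path(cfg, ["task_run_fcst", "fv3"])
assert fv3_block == {"rundir": "/path/to/run"}
# A missing key, e.g. ["task_run_fcst", "upp"], logs
# "Bad config path: task_run_fcst -> upp" and re-raises KeyError;
# a non-dict leaf logs an error and exits.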
+ """ + expt_config = get_yaml_config(config_file) + + # The experiment config will have {{ CRES | env }} and {{ MEMBER | env }} expressions in it that + # need to be dereferenced during driver initialization + os.environ["CRES"] = expt_config["workflow"]["CRES"] + os.environ["MEMBER"] = member + + restart = False + if restart: + restart_settings = { + "fv_core_nml": { + "external_ic": False, + "make_nh": False, + "mountain": True, + "na_init": 0, + "nggps_ic": False, + "warm_start": True, + }, + "gfs_physics_nml": { + "nstf_name": [2, 0, 0, 0, 0], + }, + } + expt_config.update_from( + { + "task_run_fcst": { + "fv3": {"namelist": {"update_values": restart_settings}} + } + } + ) + + fv3_driver = FV3( + config=expt_config, + cycle=cycle, + key_path=key_path, + ) + rundir = Path(fv3_driver.config["rundir"]) + + if expt_config["cpl_aqm_parm"]["CPL_AQM"]: + restart_overrides = {} + if restart: + restart_overrides["init_concentrations"] = False + + # Prepare the rc file from a template + aqm_block = _walk_key_path(expt_config, key_path + ["aqm"]) + render( + input_file=aqm_block["template_file"], + output_file=rundir / "aqm.rc", + overrides=restart_overrides, + values_src=aqm_block["template_values"], + ) + + # Prepare config files for inline post, if needed + model_configure_block = _walk_key_path( + fv3_driver.config, + ["model_configure", "update_values"], + ) + if do_post := model_configure_block["write_dopost"]: + upp_driver = UPP( + config=expt_config, + cycle=cycle, + leadtime=dt.timedelta(hours=999), + key_path=key_path, + ) + upp_driver.control_file() + upp_driver.files_copied() + upp_driver.files_linked() + upp_driver.namelist_file() + + ufs_configure_block = _walk_key_path(expt_config, key_path + ["ufs_configure"]) + render( + input_file=ufs_configure_block["template_file"], + output_file=rundir / "ufs.configure", + values_src=ufs_configure_block["template_values"], + ) + + # Run the FV3 program via UW driver + logging.info(f"Will run FV3 in {rundir}") + fv3_driver.run() + + if not (rundir / "runscript.fv3.done").is_file(): + logging.error("Error occurred running FV3. 
Please see component error logs.") + sys.exit(1) + + # Deliver output data + if do_post: + fcst_len = fv3_driver.config["length"] + output_fh = fv3_driver.config["model_configure"]["update_values"][ + "output_fh" + ].split() + if len(output_fh) == 2 and output_fh[-1] == -1: + expected_output_hours = range(0, fcst_len + 1, int(output_fh[0])) + else: + expected_output_hours = [int(x) for x in output_fh] + + upp_config = _walk_key_path(expt_config, ["task_run_post"]) + + for fcst_hr in expected_output_hours: + links = {} + for label in upp_config["output_file_labels"]: + # deepcopy here because desired_output_name is parameterized within the loop + expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) + expt_config_cp.dereference( + context={ + "cycle": cycle, + "leadtime": dt.timedelta(hours=fcst_hr), + "file_label": label, + **expt_config_cp, + } + ) + upp_block = _walk_key_path(expt_config_cp, ["task_run_post"]) + desired_output_fn = upp_block["desired_output_name"] + upp_output_fn = rundir / f"{label.upper()}.GrbF{fcst_hr:02d}" + links[desired_output_fn] = str(upp_output_fn) + + uwlink(target_dir=rundir / "postprd", config=links) + + +if __name__ == "__main__": + + use_uwtools_logger() + + args = parse_args(sys.argv[1:]) + run_fcst( + config_file=args.config_file, + cycle=args.cycle, + key_path=args.key_path, + member=args.member, + ) diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml index 383fd57be4..4f83fe55ea 100644 --- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml @@ -35,14 +35,18 @@ task_get_extrn_lbcs: EXTRN_MDL_LBCS_OFFSET_HRS: 0 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - walltime: 01:20:00 - OMP_NUM_THREADS_RUN_FCST: 1 + fv3: + execution: + batchargs: + threads: 1 + walltime: 01:20:00 + model_configure: + update_values: + restart_interval: 12 24 DT_ATMOS: 180 LAYOUT_X: 50 LAYOUT_Y: 34 BLOCKSIZE: 16 - RESTART_INTERVAL: 12 24 - QUILTING: true PRINT_ESMF: false DO_FCST_RESTART: false task_run_post: diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid.yaml index ab2c8e8ef3..f81cb0a9d7 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid.yaml @@ -31,11 +31,13 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - DT_ATMOS: 40 + fv3: + model_configure: + update_values: + dt_atmos: 40 LAYOUT_X: 8 LAYOUT_Y: 12 BLOCKSIZE: 13 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: 12 WRTCMP_output_grid: lambert_conformal diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Central_Asia_3km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Central_Asia_3km.yaml index 50827956c4..87f19b833f 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Central_Asia_3km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Central_Asia_3km.yaml @@ -31,11 +31,16 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - DT_ATMOS: 45 + fv3: + model_configure: + update_values: + dt_atmos: 45 + execution: + batchargs: + walltime: 01:00:00 LAYOUT_X: 12 LAYOUT_Y: 12 BLOCKSIZE: 32 - QUILTING: true WRTCMP_write_groups: 1 
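As a side note on the expected-output-hours logic in run_fcst above, a minimal sketch of the same parsing, assuming the space-separated model_configure form (e.g. output_fh: '1 -1'); because str.split() yields strings, the "every N hours to the end of the forecast" sentinel is matched here as the string "-1".

def expected_output_hours(output_fh: str, fcst_len: int) -> list:
    """Expand an output_fh specification into the list of output forecast hours."""
    tokens = output_fh.split()
    if len(tokens) == 2 and tokens[-1] == "-1":
        # "N -1": output every N hours from 0 through the forecast length.
        return list(range(0, fcst_len + 1, int(tokens[0])))
    # Otherwise an explicit list of hours, e.g. "0 3 6".
    return [int(t) for t in tokens]

# expected_output_hours("1 -1", 6) -> [0, 1, 2, 3, 4, 5, 6]
# expected_output_hours("0 3 6", 6) -> [0, 3, 6]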
WRTCMP_write_tasks_per_group: '{{ LAYOUT_Y }}' WRTCMP_output_grid: lambert_conformal @@ -49,6 +54,5 @@ task_run_fcst: WRTCMP_ny: 285 WRTCMP_dx: '{{ task_make_grid.ESGgrid_DELX }}' WRTCMP_dy: '{{ task_make_grid.ESGgrid_DELY }}' - walltime: 01:00:00 task_run_post: post_output_domain_name: custom_ESGgrid_central_asia diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml index 4cecf8b73b..05b4cf75c7 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml @@ -37,12 +37,16 @@ task_get_extrn_lbcs: EXTRN_MDL_NAME_LBCS: RAP LBC_SPEC_INTVL_HRS: 3 task_run_fcst: - walltime: 01:00:00 - DT_ATMOS: 90 + fv3: + model_configure: + update_values: + dt_atmos: 90 + execution: + batchargs: + walltime: 01:00:00 LAYOUT_X: 8 LAYOUT_Y: 10 BLOCKSIZE: 32 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: '{{ LAYOUT_Y }}' WRTCMP_output_grid: lambert_conformal diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_IndianOcean_6km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_IndianOcean_6km.yaml index 9200aefac1..f4f3dab479 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_IndianOcean_6km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_IndianOcean_6km.yaml @@ -33,11 +33,16 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - DT_ATMOS: 60 + fv3: + model_configure: + update_values: + dt_atmos: 60 + execution: + batchargs: + walltime: 01:00:00 LAYOUT_X: 6 LAYOUT_Y: 6 BLOCKSIZE: 32 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: '{{ LAYOUT_Y }}' WRTCMP_output_grid: lambert_conformal @@ -51,6 +56,5 @@ task_run_fcst: WRTCMP_ny: 145 WRTCMP_dx: '{{ task_make_grid.ESGgrid_DELX }}' WRTCMP_dy: '{{ task_make_grid.ESGgrid_DELY }}' - walltime: 01:00:00 task_run_post: post_output_domain_name: custom_ESGgrid_Indian_Ocean diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_NewZealand_3km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_NewZealand_3km.yaml index d49a8a5314..500dde6be9 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_NewZealand_3km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_NewZealand_3km.yaml @@ -33,11 +33,16 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - DT_ATMOS: 40 + fv3: + model_configure: + update_values: + dt_atmos: 40 + execution: + batchargs: + walltime: 01:00:00 LAYOUT_X: 16 LAYOUT_Y: 14 BLOCKSIZE: 32 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: '{{ LAYOUT_Y }}' WRTCMP_output_grid: lambert_conformal @@ -51,6 +56,5 @@ task_run_fcst: WRTCMP_ny: 495 WRTCMP_dx: '{{ task_make_grid.ESGgrid_DELX }}' WRTCMP_dy: '{{ task_make_grid.ESGgrid_DELY }}' - walltime: 01:00:00 task_run_post: post_output_domain_name: custom_ESGgrid_New_Zealand diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Peru_12km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Peru_12km.yaml index 7d14578137..e5be61af37 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Peru_12km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Peru_12km.yaml @@ -33,11 +33,16 @@ task_get_extrn_lbcs: 
LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - DT_ATMOS: 90 + fv3: + model_configure: + update_values: + dt_atmos: 90 + execution: + batchargs: + walltime: 01:00:00 LAYOUT_X: 8 LAYOUT_Y: 8 BLOCKSIZE: 32 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: '{{ LAYOUT_Y }}' WRTCMP_output_grid: lambert_conformal @@ -51,7 +56,6 @@ task_run_fcst: WRTCMP_ny: 235 WRTCMP_dx: '{{ task_make_grid.ESGgrid_DELX }}' WRTCMP_dy: '{{ task_make_grid.ESGgrid_DELY }}' - walltime: 01:00:00 task_run_post: post_output_domain_name: custom_ESGgrid_Peru diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml index 27ee0dcaf4..69a9c903df 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml @@ -40,11 +40,16 @@ task_make_ics: batchargs: nodes: 16 task_run_fcst: - DT_ATMOS: 15 + fv3: + model_configure: + update_values: + dt_atmos: 15 + execution: + batchargs: + walltime: 02:30:00 LAYOUT_X: 24 LAYOUT_Y: 24 BLOCKSIZE: 35 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: '{{ LAYOUT_Y }}' WRTCMP_output_grid: lambert_conformal @@ -58,6 +63,5 @@ task_run_fcst: WRTCMP_ny: 557 WRTCMP_dx: '{{ task_make_grid.ESGgrid_DELX }}' WRTCMP_dy: '{{ task_make_grid.ESGgrid_DELY }}' - walltime: 02:30:00 task_run_post: post_output_domain_name: custom_ESGgrid_SF_Bay_Area diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid.yaml index 128702b3df..613611d4c4 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid.yaml @@ -43,11 +43,13 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - DT_ATMOS: 100 + fv3: + model_configure: + update_values: + dt_atmos: 100 LAYOUT_X: 6 LAYOUT_Y: 6 BLOCKSIZE: 26 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: 6 WRTCMP_output_grid: rotated_latlon diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE.yaml index 2a0dd2a604..11b9a8d879 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE.yaml @@ -37,11 +37,13 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - DT_ATMOS: 100 + fv3: + model_configure: + update_values: + dt_atmos: 100 LAYOUT_X: 6 LAYOUT_Y: 6 BLOCKSIZE: 26 - QUILTING: true WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: 6 WRTCMP_output_grid: rotated_latlon diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2.yaml index d70a111b95..b246ef2baf 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2.yaml +++ 
b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2.yaml @@ -24,5 +24,8 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - WRITE_DOPOST: true - ITASKS: 2 + fv3: + model_configure: + update_values: + write_dopost: true + itasks: 2 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml index 6190a48560..ba5c09e8ba 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml @@ -30,4 +30,7 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - QUILTING: false + fv3: + model_configure: + update_values: + quilting: false diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml index 63a703216c..3775b9423b 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml @@ -34,6 +34,9 @@ task_get_extrn_lbcs: EXTRN_MDL_LBCS_OFFSET_HRS: 0 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - walltime: 01:00:00 + fv3: + execution: + batchargs: + walltime: 01:00:00 verification: VX_FCST_MODEL_NAME: FV3_GFS_v15p2_CONUS_25km diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml index 1a8311e455..ffb61be097 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml @@ -33,7 +33,10 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: - walltime: 01:00:00 + fv3: + execution: + batchargs: + walltime: 01:00:00 global: DO_ENSEMBLE: true NUM_ENS_MEMBERS: 2 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR.yaml index fa1823ea51..ac61f4140e 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR.yaml @@ -29,7 +29,10 @@ task_get_extrn_lbcs: EXTRN_MDL_FILES_LBCS: - '{yy}{jjj}{hh}00{fcst_hr:02d}00' task_run_fcst: - DT_ATMOS: 75 + fv3: + model_configure: + update_values: + dt_atmos: 75 LAYOUT_X: 10 LAYOUT_Y: 4 BLOCKSIZE: 23 diff --git 
a/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml index e034d1a19c..1ff27efca7 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml index b9cb285dd2..38a0a342a6 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml index b50dab0c15..dafc2ff5d6 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml index af9d2e6f2d..190fe50f50 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml index 8c724913e8..38329c76ef 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml index 18bdc52fc9..35a5fc9aca 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml index 7bab5e12c5..8a43cbf9b2 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git 
a/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml index 5bfe902435..c81d6a617f 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml @@ -37,7 +37,7 @@ task_make_lbcs: batchargs: walltime: 06:00:00 task_run_fcst: - chgres_cube: + fv3: execution: batchargs: walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml index 7c32b83867..e2b8af62a4 100644 --- a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml +++ b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml @@ -33,7 +33,10 @@ task_get_extrn_lbcs: FV3GFS_FILE_FMT_LBCS: netcdf LBC_SPEC_INTVL_HRS: 3 task_run_fcst: - walltime: 01:00:00 + fv3: + execution: + batchargs: + walltime: 01:00:00 global: DO_ENSEMBLE: true diff --git a/tests/WE2E/test_configs/wflow_features/config.get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS.yaml b/tests/WE2E/test_configs/wflow_features/config.get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS.yaml index ca6cf0476b..657e1eceb6 100644 --- a/tests/WE2E/test_configs/wflow_features/config.get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS.yaml +++ b/tests/WE2E/test_configs/wflow_features/config.get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS.yaml @@ -24,4 +24,7 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 FV3GFS_FILE_FMT_LBCS: netcdf task_run_fcst: - DT_ATMOS: 60 + fv3: + model_configure: + update_values: + dt_atmos: 60 diff --git a/ush/ccpp_suites_defaults.yaml b/ush/ccpp_suites_defaults.yaml index fa9f853475..fea9c2b0ff 100644 --- a/ush/ccpp_suites_defaults.yaml +++ b/ush/ccpp_suites_defaults.yaml @@ -23,7 +23,144 @@ orog_gsl_defaults: &orog_gsl_defaults - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }} rundir: "{{ task_make_orog.rundir }}/orog_gsl" -FV3_RAP: +run_fcst_hrrr_fix_files: + files_to_link: &FV3_HRRR_fix_files + INPUT/oro_data_ls.nc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}_oro_data_ls.tile7.halo{{ constants.NH0 }}.nc' + INPUT/oro_data_ss.nc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}_oro_data_ss.tile7.halo{{ constants.NH0 }}.nc' + +fv3_nml_gfs_gfdl_cloud_mp_defaults: + gfdl_cloud_microphysics_nml: &gfs_gfdl_cloud_mp + c_cracw: 0.8 + c_paut: 0.5 + c_pgacs: 0.01 + c_psaci: 0.05 + ccn_l: 300.0 + ccn_o: 100.0 + const_vg: False + const_vi: False + const_vr: False + const_vs: False + de_ice: False + do_qa: True + do_sedi_heat: False + dw_land: 0.16 + dw_ocean: 0.1 + fast_sat_adj: True + fix_negative: True + icloud_f: 1 + mono_prof: True + mp_time: 90.0 + prog_ccn: False + qi0_crt: 8.0e-05 + qi_lim: 1.0 + ql_gen: 0.001 + ql_mlt: 0.001 + qs0_crt: 0.001 + rad_graupel: True + rad_rain: True + rad_snow: True + rh_inc: 0.3 + rh_inr: 0.3 + rh_ins: 0.3 + rthresh: 1.0e-05 + sedi_transport: False + tau_g2v: 900.0 + tau_i2s: 1000.0 + tau_l2v: 180.0 + tau_v2l: 90.0 + use_ccn: True + use_ppm: False + vg_max: 12.0 + vi_max: 1.0 + vr_max: 12.0 + vs_max: 2.0 + z_slope_ice: True + z_slope_liq: True + + +FV3_GFS_v15p2: + fv3_namelist_settings: + fv_core_nml: &gfs_v15_fv_core + agrid_vel_rst: False + d2_bg_k1: 0.15 + d2_bg_k2: 0.02 + dnats: !int '{{ 5 if cpl_aqm_parm.CPL_AQM else 1 }}' + do_sat_adj: True + fv_debug: False + fv_sg_adj: 600 + k_split: 1 + kord_mt: 9 + kord_tm: -9 + kord_tr: 9 + kord_wz: 9 + n_split: 8 + n_sponge: 30 + 
nord_zs_filter: !remove + nudge_qv: True + range_warn: False + rf_cutoff: 750.0 + rf_fast: False + gfdl_cloud_microphysics_nml: + <<: *gfs_gfdl_cloud_mp + sedi_transport: True + tau_l2v: 225.0 + tau_v2l: 150.0 + gfs_physics_nml: &gfs_v15_gfs_physics + bl_mynn_edmf: !remove + bl_mynn_edmf_mom: !remove + bl_mynn_tkeadvect: !remove + cnvcld: True + cnvgwd: True + cplflx: !remove + do_myjpbl: False + do_myjsfc: False + do_mynnedmf: !remove + do_mynnsfclay: !remove + do_tofd: False + do_ugwp: False + do_ysu: False + fhcyc: 0.0 + fhlwr: 3600.0 + fhswr: 3600.0 + hybedmf: True + iau_delthrs: !remove + iaufhrs: !remove + imfdeepcnv: 2 + imfshalcnv: 2 + imp_physics: 11 + icloud_bl: !remove + iopt_alb: 2 + iopt_btr: 1 + iopt_crs: 1 + iopt_dveg: 2 + iopt_frz: 1 + iopt_inf: 1 + iopt_rad: 1 + iopt_run: 1 + iopt_sfc: 1 + iopt_snf: 4 + iopt_stc: 1 + iopt_tbot: 2 + iopt_trs: 2 + ldiag_ugwp: False + lgfdlmprad: True + lradar: !remove + lsm: 1 + lsoil: !remove + lsoil_lsm: !remove + ltaerosol: !remove + shal_cnv: True + shinhong: False + ttendlim: !remove + xkzm_h: 1.0 + xkzm_m: 1.0 + xkzminv: 0.3 + namsfc: + landice: True + ldebug: False + surf_map_nml: !remove + +FV3_GFS_v15_thompson_mynn_lam3km: task_make_ics: chgres_cube: <<: *chgres_cube_gsd_defaults @@ -33,7 +170,139 @@ FV3_RAP: task_make_orog: orog_gsl: <<: *orog_gsl_defaults -FV3_HRRR: + task_run_fcst: + fv3: + files_to_link: *FV3_HRRR_fix_files + fv3_namelist_settings: + atmos_model_nml: + avg_max_length: 3600.0 + fv_core_nml: + agrid_vel_rst: True + full_zs_filter: !remove + n_sponge: 9 + npz_type: '' + rf_fast: False + sg_cutoff: 10000.0 + vtdm4: 0.02 + gfs_physics_nml: + avg_max_length: 3600.0 + cdmbgwd: [0.88, 0.04] + debug: True + do_deep: False + do_gsl_drag_ls_bl: False + do_gsl_drag_ss: True + do_gsl_drag_tofd: True + do_mynnsfclay: True + do_tofd: False + do_ugwp: False + do_ugwp_v0: False + do_ugwp_v0_nst_only: False + do_ugwp_v0_orog_only: False + fhswr: 900.0 + fhlwr: 900.0 + gwd_opt: 2 + iaer: 1011 + iccn: 2 + icliq_sw: 2 + imfdeepcnv: 2 + imfshalcnv: 2 + iopt_alb: 2 + iopt_btr: 1 + iopt_crs: 1 + iopt_dveg: 2 + iopt_frz: 1 + iopt_inf: 1 + iopt_rad: 1 + iopt_run: 1 + iopt_sfc: 1 + iopt_snf: 4 + iopt_stc: 1 + iopt_tbot: 2 + iopt_trs: !remove + iovr: 3 + ldiag_ugwp: False + lgfdlmprad: False + lsm: 1 + lsoil: !remove + lsoil_lsm: !remove + ltaerosol: False + print_diff_pgr: True + sfclay_compute_flux: !remove + xkzminv: 0.3 + xkzm_m: 1.0 + xkzm_h: 1.0 + surf_map_nml: !remove + +FV3_GFS_v16: + fv3_namelist_settings: + cires_ugwp_nml: + launch_level: 27 + fv_core_nml: + <<: *gfs_v15_fv_core + agrid_vel_rst: False + d2_bg_k1: 0.2 + d2_bg_k2: 0.0 + delt_max: 0.002 + dnats: !int '{{ 5 if cpl_aqm_parm.CPL_AQM else 1 }}' + dz_min: 6 + fv_sg_adj: 450 + hord_dp: -5 + hord_mt: 5 + hord_tm: 5 + hord_tr: !int '{{ 8 if cpl_aqm_parm.CPL_AQM else 10 }}' + hord_vt: 5 + k_split: 6 + make_nh: False + n_split: 6 + n_sponge: 10 + na_init: 0 + nord: !int '{{ 2 if cpl_aqm_parm.CPL_AQM else 3 }}' + nudge_dz: False + res_latlon_dynamics: '' + rf_fast: !remove + tau: 10.0 + gfdl_cloud_microphysics_nml: + <<: *gfs_gfdl_cloud_mp + mp_time: 150.0 + reiflag: 2 + sedi_transport: True + tau_l2v: 225.0 + tau_v2l: 150.0 + gfs_physics_nml: + <<: *gfs_v15_gfs_physics + cdmbgwd: [4.0, 0.15, 1.0, 1.0] + do_myjpbl: !remove + do_myjsfc: !remove + do_tofd: True + do_ysu: !remove + hybedmf: False + iaer: 5111 + icliq_sw: 2 + iopt_dveg: 1 + iovr: 3 + isatmedmf: 1 + lgfdlmprad: True + lheatstrg: True + lndp_type: !remove + lsoil: 4 + n_var_lndp: !remove + prautco: [0.00015, 
0.00015] + psautco: [0.0008, 0.0005] + satmedmf: True + shinhong: !remove + xkzminv: !remove + xkzm_m: !remove + xkzm_h: !remove + mpp_io_nml: + deflate_level: 1 + shuffle: 1 + namsfc: + landice: True + ldebug: False + surf_map_nml: !remove + + +FV3_GFS_v17_p8: task_make_ics: chgres_cube: <<: *chgres_cube_gsd_defaults @@ -43,37 +312,264 @@ FV3_HRRR: task_make_orog: orog_gsl: <<: *orog_gsl_defaults -FV3_WoFS_v0: + task_run_fcst: + fv3: + files_to_link: *FV3_HRRR_fix_files + fv3_namelist_settings: + cires_ugwp_nml: + launch_level: 27 + fv_core_nml: + <<: *gfs_v15_fv_core + agrid_vel_rst: False + d2_bg_k1: 0.2 + d2_bg_k2: 0.0 + dnats: !int '{{ 4 if cpl_aqm_parm.CPL_AQM else 0 }}' + do_sat_adj: False + fv_sg_adj: 450 + hord_dp: -5 + hord_mt: 5 + hord_tm: 5 + hord_tr: 8 + hord_vt: 5 + k_split: 6 + make_nh: True + n_split: 6 + n_sponge: 10 + na_init: 1 + nord: !int '{{ 2 if cpl_aqm_parm.CPL_AQM else 1 }}' + nudge_dz: False + res_latlon_dynamics: '' + rf_fast: !remove + tau: 10.0 + gfs_physics_nml: + cdmbgwd: [4.0, 0.05, 1.0, 1.0] + cnvcld: True + cnvgwd: True + decfl: 10 + do_deep: True + do_gsl_drag_ls_bl: False + do_gsl_drag_ss: True + do_gsl_drag_tofd: False + do_mynnedmf: False + do_mynnsfclay: False + do_tofd: False + do_ugwp: False + do_ugwp_v0: True + do_ugwp_v0_orog_only: False + do_ugwp_v0_nst_only: False + do_ugwp_v1: False + do_ugwp_v1_orog_only: False + dt_inner: 150.0 + fhlwr: 1200.0 + fhswr: 1200.0 + frac_grid: False + gwd_opt: 2 + iaer: 1011 + ialb: 2 + icliq_sw: 2 + iems: 2 + imfdeepcnv: 2 + imfshalcnv: 2 + iopt_alb: 1 + iopt_btr: 1 + iopt_crs: 2 + iopt_dveg: 4 + iopt_frz: 1 + iopt_inf: 1 + iopt_rad: 3 + iopt_run: 1 + iopt_sfc: 3 + iopt_snf: 4 + iopt_stc: 3 + iopt_tbot: 2 + iovr: 3 + isatmedmf: 1 + ldiag_ugwp: False + lseaspray: True + lgfdlmprad: False + lheatstrg: False + lradar: False + lsm: 2 + lsoil_lsm: 4 + ltaerosol: False + min_lakeice: 0.15 + min_seaice: 0.15 + qdiag3d: False + ras: False + satmedmf: True + sedi_semi: True + shal_cnv: True + mpp_io_nml: + deflate_level: 1 + shuffle: 1 + surf_map_nml: !remove + + +FV3_HRRR: + task_make_orog: + orog_gsl: + <<: *orog_gsl_defaults task_make_ics: chgres_cube: <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: <<: *chgres_cube_gsd_defaults -FV3_RRFS_v1beta: + task_run_fcst: + fv3: + files_to_link: *FV3_HRRR_fix_files + fv3_namelist_settings: + fv3_namelist_settings: + fv_core_nml: &HRRR_fv_core + hord_dp: 6 + hord_mt: 6 + hord_tm: 6 + hord_vt: 6 + hord_tr: 8 + kord_mt: 9 + kord_tm: -9 + kord_tr: 9 + kord_wz: 9 + nord_tr: 0 + nrows_blend: 20 + d_con: 0.5 + n_sponge: 9 + gfs_physics_nml: &FV3_HRRR_Phys + cdmbgwd: [3.5, 1.0] + diag_log: True + do_deep: False + do_gsl_drag_ss: True + do_gsl_drag_tofd: True + do_gsl_drag_ls_bl: True + do_mynnsfclay: True + gwd_opt: 3 + iaer: 5111 + ialb: 2 + iems: 2 + icliq_sw: 2 + imfdeepcnv: -1 + imfshalcnv: -1 + iopt_alb: 2 + iopt_btr: 1 + iopt_crs: 1 + iopt_dveg: 2 + iopt_frz: 1 + iopt_inf: 1 + iopt_rad: 1 + iopt_run: 1 + iopt_sfc: 1 + iopt_snf: 4 + iopt_stc: 1 + iopt_tbot: 2 + iopt_trs: 2 + iovr: 3 + isncond_opt: 2 + isncovr_opt: 3 + lsm: 3 + lsoil: !int '{{ 9 if task_get_extrn_ics.envvars.EXTRN_MDL_NAME_ICS in ("RAP", "HRRR") else 4 }}' + lsoil_lsm: 9 + mosaic_lu: 0 + mosaic_soil: 0 + nst_anl: !remove + nstf_name: !remove + sfclay_compute_flux: True + thsfc_loc: False + +FV3_RAP: + task_make_orog: + orog_gsl: + <<: *orog_gsl_defaults task_make_ics: chgres_cube: <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: <<: *chgres_cube_gsd_defaults -FV3_GFS_v15_thompson_mynn_lam3km: + 
task_run_fcst: + fv3: + files_to_link: *FV3_HRRR_fix_files + fv3_namelist_settings: + fv_core_nml: + <<: *HRRR_fv_core + gfs_physics_nml: + <<: *FV3_HRRR_Phys + do_deep: True + shal_cnv: True + imfdeepcnv: 3 + imfshalcnv: 3 + diag_log: True + ialb: 1 + iems: 1 + isncond_opt: !remove + isncovr_opt: !remove + mosaic_lu: !remove + mosaic_soil: !remove + thsfc_loc: !remove + +FV3_RRFS_v1beta: task_make_ics: chgres_cube: <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: <<: *chgres_cube_gsd_defaults - task_make_orog: - orog_gsl: - <<: *orog_gsl_defaults -FV3_GFS_v17_p8: + fv3_namelist_settings: + gfs_physics_nml: + do_deep: False + do_mynnsfclay: True + imfdeepcnv: -1 + imfshalcnv: -1 + iopt_alb: 2 + iopt_btr: 1 + iopt_crs: 1 + iopt_dveg: 2 + iopt_frz: 1 + iopt_inf: 1 + iopt_rad: 1 + iopt_run: 1 + iopt_sfc: 1 + iopt_snf: 4 + iopt_stc: 1 + iopt_tbot: 2 + iopt_trs: 2 + lsm: 2 + lsoil_lsm: 4 + +FV3_WoFS_v0: task_make_ics: chgres_cube: <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: <<: *chgres_cube_gsd_defaults - task_make_orog: - orog_gsl: - <<: *orog_gsl_defaults + fv3_namelist_settings: + gfs_physics_nml: + do_deep: False + imfdeepcnv: 0 + imfshalcnv: 0 + iopt_alb: 2 + iopt_btr: 1 + iopt_crs: 1 + iopt_dveg: 2 + iopt_frz: 1 + iopt_inf: 1 + iopt_rad: 1 + iopt_run: 1 + iopt_sfc: 1 + iopt_snf: 4 + iopt_stc: 1 + iopt_tbot: 2 + do_mynnsfclay: True + imfdeepcnv: -1 + imfshalcnv: -1 + lsm: 1 + lsoil_lsm: 4 + imp_physics: 17 + nssl_cccn: 0.6e+9 + nssl_hail_on: True + nssl_ccn_on: True + fv_core_nml: + nwat: 7 + fv_diagnostics_nml: + do_hailcast: True + diff --git a/ush/config.community.yaml b/ush/config.community.yaml index 8b224e9a9c..cc1d35bb22 100644 --- a/ush/config.community.yaml +++ b/ush/config.community.yaml @@ -30,6 +30,9 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 FV3GFS_FILE_FMT_LBCS: grib2 task_run_fcst: - walltime: 02:00:00 + fv3: + execution: + batchargs: + walltime: 02:00:00 verification: VX_FCST_MODEL_NAME: FV3_GFS_v16_CONUS_25km diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 92334eb2e5..89a9999bbe 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -533,19 +533,6 @@ workflow: # #----------------------------------------------------------------------- # - # Set the separator character(s) to use in the names of the grid, mosaic, - # and orography fixed files. - # - # Ideally, the same separator should be used in the names of these fixed - # files as in the surface climatology fixed files (which always use a "." - # as the separator), i.e. ideally, DOT_OR_USCORE should be set to "." - # - #----------------------------------------------------------------------- - # - DOT_OR_USCORE: "_" - # - #----------------------------------------------------------------------- - # # Set file names. Definitions: # # EXPT_CONFIG_FN: @@ -559,65 +546,6 @@ workflow: # Name of file containing the namelist settings for the code that generates # a "ESGgrid" type of regional grid. # - # FV3_NML_FN: - # Name of the forecast model's namelist file. It includes the information in - # FV3_NML_BASE_SUITE_FN (i.e., input.nml.FV3), FV3_NML_YAML_CONFIG_FN (i.e., FV3.input.yml), and the user configuration file (i.e., config.yaml). - # - # FV3_NML_BASE_SUITE_FN: - # Name of Fortran namelist file containing the forecast model's base suite - # namelist, i.e. the portion of the namelist that is common to all physics - # suites. 
- # - # FV3_NML_YAML_CONFIG_FN: - # Name of YAML configuration file containing the forecast model's namelist - # settings for various physics suites. - # - # FV3_NML_BASE_ENS_FN: - # Name of Fortran namelist file containing the forecast model's base - # ensemble namelist, i.e. the the namelist file that is the starting point - # from which the namelist files for each of the enesemble members are - # generated. - # - # FV3_EXEC_FN: - # Name to use for the forecast model executable when it is copied from - # the directory in which it is created in the build step to the executables - # directory (EXECDIR; this is set during experiment generation). - # - # DATA_TABLE_FN: - # Name of the file that contains the data table read in by the forecast model. - # - # DIAG_TABLE_FN: - # Prefix for the name of the file that specifies - # the output fields of the forecast model. - # - # FIELD_TABLE_FN: - # Prefix for the name of the file that specifies - # the tracers that the forecast model will read in from the IC/LBC files. - # - # DIAG_TABLE_TMPL_FN: - # Name of a template file that specifies the output fields of the - # forecast model (ufs-weather-model: diag_table) followed by the name - # of the ccpp_phys_suite. Its default value is the name of the file - # that the ufs weather model - # expects to read in. - # - # FIELD_TABLE_TMPL_FN: - # Name of a template file that specifies the tracers in IC/LBC files of the - # forecast model (ufs-weather-mode: field_table) followed by [dot_ccpp_phys_suite]. - # Its default value is the name of the file that the ufs weather model expects - # to read in. - # - # MODEL_CONFIG_FN: - # Name of a template file that contains settings and configurations for the - # NUOPC/ESMF main component (ufs-weather-model: model_config). Its default - # value is the name of the file that the ufs weather model expects to read in. - # - # UFS_CONFIG_FN: - # Name of a template file that contains information about the various UFS - # components and their run sequence (ufs-weather-model: ufs.configure). - # Its default value is the name of the file that the ufs weather model expects - # to read in. - # # AQM_RC_FN: # Name of resource file for NOAA Air Quality Model (AQM). # @@ -627,51 +555,14 @@ workflow: #----------------------------------------------------------------------- # EXPT_CONFIG_FN: "config.yaml" - CONSTANTS_FN: "constants.yaml" - + RGNL_GRID_NML_FN: "regional_grid.nml" - - FV3_NML_FN: "input.nml" - FV3_NML_BASE_SUITE_FN: "{{ workflow.FV3_NML_FN }}.FV3" - FV3_NML_YAML_CONFIG_FN: "FV3.input.yml" - FV3_NML_BASE_ENS_FN: "{{ workflow.FV3_NML_FN }}.base_ens" - FV3_EXEC_FN: "ufs_model" - DATA_TABLE_FN: "data_table" - DIAG_TABLE_FN: "diag_table" - FIELD_TABLE_FN: "field_table" - DIAG_TABLE_TMPL_FN: 'diag_table.{{ workflow.CCPP_PHYS_SUITE }}' - FIELD_TABLE_TMPL_FN: 'field_table.{{ workflow.CCPP_PHYS_SUITE }}' - MODEL_CONFIG_FN: "model_configure" - UFS_CONFIG_FN: "ufs.configure" AQM_RC_FN: "aqm.rc" AQM_RC_TMPL_FN: "aqm.rc" # #----------------------------------------------------------------------- - # FV3_NML_BASE_SUITE_FP: - # Path to the FV3_NML_BASE_SUITE_FN file. - # - # FV3_NML_YAML_CONFIG_FP: - # Path to the FV3_NML_YAML_CONFIG_FN file. - # - # FV3_NML_BASE_ENS_FP: - # Path to the FV3_NML_BASE_ENS_FN file. - # - # DATA_TABLE_TMPL_FP: - # Path to the DATA_TABLE_FN file. - # - # DIAG_TABLE_TMPL_FP: - # Path to the DIAG_TABLE_TMPL_FN file. - # - # FIELD_TABLE_TMPL_FP: - # Path to the FIELD_TABLE_TMPL_FN file. - # - # MODEL_CONFIG_TMPL_FP: - # Path to the MODEL_CONFIG_FN file. 
- # - # UFS_CONFIG_TMPL_FP: - # Path to the UFS_CONFIG_FN file. # # AQM_RC_TMPL_FP: # Path to the AQM_RC_TMPL_FN file. @@ -679,32 +570,12 @@ workflow: #----------------------------------------------------------------------- # - FV3_NML_BASE_SUITE_FP: '{{ user.PARMdir }}/{{ workflow.FV3_NML_BASE_SUITE_FN }}' - FV3_NML_YAML_CONFIG_FP: '{{ user.PARMdir }}/{{ workflow.FV3_NML_YAML_CONFIG_FN }}' - FV3_NML_BASE_ENS_FP: '{{ workflow.EXPTDIR }}/{{ workflow.FV3_NML_BASE_ENS_FN }}' - DATA_TABLE_TMPL_FP: '{{ user.PARMdir }}/{{ workflow.DATA_TABLE_FN }}' - DIAG_TABLE_TMPL_FP: '{{ user.PARMdir }}/{{ workflow.DIAG_TABLE_TMPL_FN }}' - FIELD_TABLE_TMPL_FP: '{{ user.PARMdir }}/{{ workflow.FIELD_TABLE_TMPL_FN }}' - MODEL_CONFIG_TMPL_FP: '{{ user.PARMdir }}/{{ workflow.MODEL_CONFIG_FN }}' - UFS_CONFIG_TMPL_FP: '{{ user.PARMdir }}/{{ workflow.UFS_CONFIG_FN }}' AQM_RC_TMPL_FP: '{{ user.PARMdir }}/{{ workflow.AQM_RC_TMPL_FN }}' # #----------------------------------------------------------------------- # These are staged in the exptdir at configuration time # - # DATA_TABLE_FP: - # Path to the data table in the experiment directory. - # - # FIELD_TABLE_FP: - # Path to the field table in the experiment directory. - # - # UFS_CONFIG_FP: - # Path to the UFS_CONFIG_FN file in the experiment directory. - # - # FV3_NML_FP: - # Path to the FV3_NML_FN file in the experiment directory. - # # FCST_MODEL: # Name of forecast model (default=ufs-weather-model) # @@ -757,12 +628,6 @@ workflow: # #----------------------------------------------------------------------- # - DATA_TABLE_FP: '{{ workflow.EXPTDIR }}/{{ workflow.DATA_TABLE_FN }}' - FIELD_TABLE_FP: '{{ workflow.EXPTDIR }}/{{ workflow.FIELD_TABLE_FN }}' - UFS_CONFIG_FP: '{{ workflow.EXPTDIR }}/{{ workflow.UFS_CONFIG_FN }}' - FV3_NML_FP: '{{ workflow.EXPTDIR }}/{{ workflow.FV3_NML_FN }}' - FV3_NML_STOCH_FP: '{{ workflow.EXPTDIR }}/{{ workflow.FV3_NML_FN }}_stoch' - FCST_MODEL: "ufs-weather-model" WFLOW_XML_FN: "FV3LAM_wflow.xml" GLOBAL_VAR_DEFNS_FN: "var_defns.yaml" @@ -846,7 +711,6 @@ workflow: CCPP_PHYS_SUITE_FN: 'suite_{{ workflow.CCPP_PHYS_SUITE }}.xml' CCPP_PHYS_SUITE_IN_CCPP_FP: '{{ user.UFS_WTHR_MDL_DIR }}/FV3/ccpp/suites/{{ workflow.CCPP_PHYS_SUITE_FN }}' CCPP_PHYS_SUITE_FP: '{{ workflow.EXPTDIR }}/{{ workflow.CCPP_PHYS_SUITE_FN }}' - CCPP_PHYS_DIR: '{{ user.UFS_WTHR_MDL_DIR }}/FV3/ccpp/physics/physics/SFC_Models/Land/Noahmp' # #----------------------------------------------------------------------- # @@ -1490,8 +1354,8 @@ task_make_sfc_climo: stdout: "{{ user.HOMEdir }}/scripts" walltime: 00:05:00 envcmds: - - module load build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }} - module use {{ user.HOMEdir }}/modulefiles + - module load build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }} executable: "{{ user.EXECdir }}/sfc_climo_gen" mpicmd: '{{ platform.BATCH_RUN_CMD }}' namelist: @@ -1507,10 +1371,10 @@ task_make_sfc_climo: input_vegetation_greenness_file: "{{ platform.FIXsfc }}/vegetation_greenness.0.144.nc" input_vegetation_type_file: "{{ platform.FIXsfc }}/vegetation_type.igbp.0.05.nc" maximum_snow_albedo_method: "bilinear" - mosaic_file_mdl: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" + mosaic_file_mdl: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}_mosaic.halo{{ constants.NH4 }}.nc" orog_dir_mdl: "{{ workflow.FIXlam }}" orog_files_mdl: - - "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + - "{{ 'CRES' | env }}_oro_data.tile{{ 
constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" snowfree_albedo_method: "bilinear" vegetation_greenness_method: "bilinear" rundir: '{{ workflow.EXPTDIR }}/sfc_climo' @@ -1662,7 +1526,7 @@ task_get_extrn_lbcs: envvars: EXTRN_MDL_NAME_LBCS: "FV3GFS" LBC_SPEC_INTVL_HRS: 6 - EXTRN_MDL_LBCS_OFFSET_HRS: '{{ 3 if task_get_extrn_lbcs.envvars.EXTRN_MDL_NAME_LBCS == "RAP" else 0 }}' + EXTRN_MDL_LBCS_OFFSET_HRS: !int '{{ 3 if task_get_extrn_lbcs.envvars.EXTRN_MDL_NAME_LBCS == "RAP" else 0 }}' FV3GFS_FILE_FMT_LBCS: "nemsio" # #----------------------------------------------------------------------- @@ -1745,7 +1609,7 @@ task_make_ics: vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" vgtyp_from_climo: true vgfrc_from_climo: true - rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS' + rundir: '{{ task_run_fcst.fv3.rundir }}/tmp_MAKE_ICS' # The location of the metadata path created by retrieve_data.py input_files_metadata_path: '{{ task_get_extrn_ics.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.yaml' @@ -1786,7 +1650,7 @@ task_make_lbcs: regional: 2 varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" - rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS_{{ timevars.yyyymmddhh }}' + rundir: '{{ task_run_fcst.fv3.rundir}}/tmp_MAKE_LBCS_{{ timevars.yyyymmddhh }}' # The location of the metadata path created by retrieve_data.py input_files_metadata_path: '{{ task_get_extrn_lbcs.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.yaml' @@ -1797,111 +1661,156 @@ task_make_lbcs: '{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_bndy.tile7.f{{ "%03d" % (leadtime.total_seconds() // 3600) }}.nc': gfs.bndy.nc #---------------------------- -# IO_LAYOUT_Y FORECAST config parameters +# FORECAST config parameters #----------------------------- task_run_fcst: - # UW Placeholder additions - rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' - walltime: 04:30:00 - #------------------------------------------------------------------------ - # - # NNODES_RUN_FCST: - # The number of nodes to request from the job scheduler - # for the forecast task. - # - # PPN_RUN_FCST: - # Processes per node for the forecast task. - # - # FV3_EXEC_FP: - # Full path to the forecast model executable. - # - # IO_LAYOUT_Y: - # Specifies how many MPI ranks to use in the Y direction for - # input/output (I/O). (X direction is assumed to be 1.) - # - #------------------------------------------------------------------------ - NNODES_RUN_FCST: !int '{{ (task_run_fcst.PE_MEMBER01 + task_run_fcst.PPN_RUN_FCST - 1) // task_run_fcst.PPN_RUN_FCST }}' - PPN_RUN_FCST: '{{ platform.NCORES_PER_NODE // task_run_fcst.OMP_NUM_THREADS_RUN_FCST }}' - FV3_EXEC_FP: '{{ user.EXECdir }}/{{ workflow.FV3_EXEC_FN }}' - IO_LAYOUT_Y: 1 - # - #----------------------------------------------------------------------- - # - # KMP_AFFINITY_*: - # From Intel: "The Intel® runtime library has the ability to bind OpenMP - # threads to physical processing units. The interface is controlled using - # the KMP_AFFINITY environment variable. Depending on the system (machine) - # topology, application, and operating system, thread affinity can have a - # dramatic effect on the application speed. - # - # Thread affinity restricts execution of certain threads (virtual execution - # units) to a subset of the physical processing units in a multiprocessor - # computer. 
Depending upon the topology of the machine, thread affinity can - # have a dramatic effect on the execution speed of a program." - # - # For more information, see the following link: - # https://software.intel.com/content/www/us/en/develop/documentation/cpp- - # compiler-developer-guide-and-reference/top/optimization-and-programming- - # guide/openmp-support/openmp-library-support/thread-affinity-interface- - # linux-and-windows.html - # - # OMP_NUM_THREADS_RUN_FCST: - # The number of OpenMP threads to use for parallel regions. - # - # OMP_STACKSIZE_RUN_FCST: - # Controls the size of the stack for threads created by the OpenMP - # implementation. - # - #----------------------------------------------------------------------- - # - KMP_AFFINITY_RUN_FCST: "scatter" - OMP_NUM_THREADS_RUN_FCST: 2 # ATM_omp_num_threads in ufs.configure - OMP_STACKSIZE_RUN_FCST: "1024m" - # - #----------------------------------------------------------------------- - # - # Set model_configure parameters. Definitions: - # - # DT_ATMOS: - # The main forecast model integration time step. As described in the - # forecast model documentation, "It corresponds to the frequency with - # which the top level routine in the dynamics is called as well as the - # frequency with which the physics is called." - # - # FHROT: - # Forecast hour at restart - # - # RESTART_INTERVAL: - # frequency of the output restart files (unit:hour). - # Default=0: restart files are produced at the end of a forecast run - # For example, i) RESTART_INTERVAL: 1 -1 => restart files are produced - # every hour with the prefix "YYYYMMDD.HHmmSS." in the RESTART directory - # ii) RESTART_INTERVAL: 1 2 5 => restart files are produced only when - # fh = 1, 2, and 5. - # - # WRITE_DOPOST: - # Flag that determines whether or not to use the inline post feature - # [i.e. calling the Unified Post Processor (UPP) from within the - # weather model]. If this is set to true, the the run_post task will - # be deactivated. - # - # ITASKS: - # Variable denoting the number of write tasks in the i direction in the - # current group. Used for inline post 2D decomposition. Setting this - # variable to a value greater than 1 will enable 2D decomposition. - # Default setting is 1. - # Note that 2D decomposition does not yet work with GNU compilers, so this value - # will be reset to 1 automatically when using GNU compilers (i.e., when COMPILER: gnu). 
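For reference, the node-count expressions (the old NNODES_RUN_FCST formula above and the new task_run_fcst.fv3.execution.batchargs.nodes entry below) are ceiling division of the member's MPI rank count by the ranks placed on each node; a small worked sketch with illustrative numbers:

def nodes_needed(pe_member01: int, tasks_per_node: int) -> int:
    """Smallest node count whose capacity covers all MPI ranks (ceiling division)."""
    return (pe_member01 + tasks_per_node - 1) // tasks_per_node

# e.g. 396 ranks at 64 ranks per node: (396 + 63) // 64 = 7 nodes
# (6 nodes would provide only 384 slots)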
- # - #----------------------------------------------------------------------- - # - DT_ATMOS: "" - FHROT: 0 - RESTART_INTERVAL: 0 - WRITE_DOPOST: false - ITASKS: 1 - # - #----------------------------------------------------------------------- + fv3: + diag_table: + template_file: '{{ user.PARMdir }}/diag_table.{{ workflow.CCPP_PHYS_SUITE }}' + template_values: + cres: "{{ 'CRES' | env }}" + domain: regional + execution: + batchargs: + walltime: 04:30:00 + tasks_per_node: !int '{{ platform.NCORES_PER_NODE // task_run_fcst.fv3.execution.threads }}' + nodes: !int '{{ (task_run_fcst.PE_MEMBER01 + task_run_fcst.fv3.execution.batchargs.tasks_per_node - 1) // task_run_fcst.fv3.execution.batchargs.tasks_per_node }}' + envcmds: + - module use {{ user.HOMEdir }}/modulefiles + - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }} + - export ESMF_RUNTIME_PROFILE={{ "ON" if task_run_fcst.PRINT_ESMF }} + - export ESMF_RUNTIME_PROFILE_OUTPUT={{ "SUMMARY" if task_run_fcst.PRINT_ESMF }} + executable: '{{ user.EXECdir }}/ufs_model' + mpicmd: '{{ platform.BATCH_RUN_CMD }}' + threads: 2 + field_table: + base_file: '{{ user.PARMdir }}/field_table.{{ workflow.CCPP_PHYS_SUITE }}' + files_to_copy: + INPUT/gfs_ctrl.nc: '{{ task_run_fcst.fv3.rundir }}/INPUT/{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_ctrl.nc' + INPUT/gfs_data.nc: '{{ task_run_fcst.fv3.rundir }}/INPUT/{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + INPUT/sfc_data.nc: '{{ task_run_fcst.fv3.rundir }}/INPUT/{{ nco.NET_default }}.t{{ timevars.hh }}z.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + noahmptable.tbl: '{{ user.UFS_WTHR_MDL_DIR }}/FV3/ccpp/physics/physics/SFC_Models/Land/Noahmp/noahmptable.tbl' + files_to_link: + aerosol.dat: '{{ workflow.FIXam }}/global_climaeropac_global.txt' + co2historicaldata_2010.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2010.txt' + co2historicaldata_2011.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2011.txt' + co2historicaldata_2012.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2012.txt' + co2historicaldata_2013.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2013.txt' + co2historicaldata_2014.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2014.txt' + co2historicaldata_2015.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2015.txt' + co2historicaldata_2016.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2016.txt' + co2historicaldata_2017.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2017.txt' + co2historicaldata_2018.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2018.txt' + co2historicaldata_2019.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2019.txt' + co2historicaldata_2020.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2020.txt' + co2historicaldata_2021.txt: '{{ workflow.FIXam }}/fix_co2_proj/global_co2historicaldata_2021.txt' + co2historicaldata_glob.txt: '{{ workflow.FIXam }}/global_co2historicaldata_glob.txt' + co2monthlycyc.txt: '{{ workflow.FIXam }}/co2monthlycyc.txt' + global_h2oprdlos.f77: '{{ workflow.FIXam }}/global_h2o_pltc.f77' + global_albedo4.1x1.grb: '{{ workflow.FIXam }}/global_albedo4.1x1.grb' + global_zorclim.1x1.grb: '{{ workflow.FIXam }}/global_zorclim.1x1.grb' + global_tg3clim.2.6x1.5.grb: '{{ workflow.FIXam }}/global_tg3clim.2.6x1.5.grb' + sfc_emissivity_idx.txt: '{{ workflow.FIXam 
}}/global_sfc_emissivity_idx.txt' + solarconstant_noaa_an.txt: '{{ workflow.FIXam }}/global_solarconstant_noaa_an.txt' + global_o3prdlos.f77: '{{ workflow.FIXam }}/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77' + fd_ufs.yaml: '{{ user.UFS_WTHR_MDL_DIR }}/tests/parm/fd_ufs.yaml' + INPUT/grid_spec.nc: '{{ task_make_grid.GRID_DIR }}/{{ "CRES" | env }}_mosaic.halo{{ constants.NH3 }}.nc' + INPUT/grid.tile7.halo4.nc: '{{ task_make_grid.GRID_DIR }}/{{ "CRES" | env }}_grid.tile7.halo{{ constants.NH4 }}.nc' + INPUT/{{ "CRES" | env }}_grid.tile7.halo{{ constants.NH3 }}.nc: '{{ task_make_grid.GRID_DIR }}/{{ "CRES" | env }}_grid.tile7.halo{{ constants.NH3 }}.nc' + INPUT/oro_data.nc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}_oro_data.tile7.halo{{ constants.NH0 }}.nc' + INPUT/oro_data.tile7.halo{{ constants.NH4 }}.nc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}_oro_data.tile7.halo{{ constants.NH4 }}.nc' + lateral_boundary_conditions: + interval_hours: !int '{{ task_get_extrn_lbcs.envvars.LBC_SPEC_INTVL_HRS }}' + offset: 0 + path: '{{ task_run_fcst.fv3.rundir }}/INPUT/{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_bndy.tile{tile}.f{forecast_hour:03d}.nc' + length: !int '{{ workflow.FCST_LEN_HRS }}' + model_configure: + update_values: + start_year: !int '{{ cycle.year }}' + start_month: !int '{{ cycle.month }}' + start_day: !int '{{ cycle.day }}' + start_hour: !int '{{ cycle.hour }}' + start_minute: !int '{{ cycle.minute }}' + start_second: !int '{{ cycle.second }}' + nhours_fcst: !int '{{ workflow.FCST_LEN_HRS }}' + fhrot: 0 + run_continue: false + ens_sps: false + dt_atmos: !int '{{ task_run_fcst.DT_ATMOS // 1 }}' + calendar: julian + memuse_verbose: false + restart_interval: 0 + output_1st_tstep_rst: false + write_dopost: false + ideflate: 0 + nbits: 0 + ichunk2d: -1 + jchunk2d: -1 + ichunk3d: -1 + jchunk3d: -1 + kchunk3d: -1 + itasks: 1 + quilting: true + write_groups: !int '{{ task_run_fcst.WRTCMP_write_groups // 1 }}' + write_tasks_per_group: !int '{{ task_run_fcst.WRTCMP_write_tasks_per_group // 1 }}' + num_files: 2 + filename_base: 'dyn phy' + output_file: 'netcdf netcdf' + output_fh: '1 -1' + nsout: -1 + output_grid: '{{ task_run_fcst.WRTCMP_output_grid }}' + namelist: + base_file: '{{ workflow.EXPTDIR }}/input.nml.{{ workflow.CCPP_PHYS_SUITE }}' + update_values: + atmos_model_nml: + blocksize: !int '{{ task_run_fcst.BLOCKSIZE // 1 }}' + ccpp_suite: '{{ workflow.CCPP_PHYS_SUITE }}' + fv_core_nml: + target_lon: !float '{{ grid_params.LON_CTR }}' + target_lat: !float '{{ grid_params.LAT_CTR }}' + nrows_blend: !int '{{ global.HALO_BLEND }}' + stretch_fac: !float '{{ grid_params.STRETCH_FAC }}' + npx: !int '{{ grid_params.NX + 1 }}' + npy: !int '{{ grid_params.NY + 1 }}' + layout: + - !int '{{ task_run_fcst.LAYOUT_X // 1 }}' + - !int '{{ task_run_fcst.LAYOUT_Y // 1 }}' + bc_update_interval: !int '{{ task_get_extrn_lbcs.envvars.LBC_SPEC_INTVL_HRS }}' + gfs_physics_nml: + kice: !int '{{ 9 if workflow.SDF_USES_RUC_LSM else 2 }}' + print_diff_pgr: false + namsfc: + # From sfc_climo_gen + fnalbc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.snowfree_albedo.tileX.nc' + fnalbc2: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.facsf.tileX.nc' + fntg3c: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.substrate_temperature.tileX.nc' + fnvegc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.vegetation_greenness.tileX.nc' + fnvetc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.vegetation_type.tileX.nc' + fnsotc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.soil_type.tileX.nc' + fnvmnc: '{{ workflow.FIXlam }}/{{ "CRES" | env 
}}.vegetation_greenness.tileX.nc' + fnvmxc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.vegetation_greenness.tileX.nc' + fnslpc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.slope_type.tileX.nc' + fnabsc: '{{ workflow.FIXlam }}/{{ "CRES" | env }}.maximum_snow_albedo.tileX.nc' + # General fix files + fnglac: '{{ workflow.FIXam }}/global_glacier.2x2.grb' + fnmxic: '{{ workflow.FIXam }}/global_maxice.2x2.grb' + fntsfc: '{{ workflow.FIXam }}/RTGSST.1982.2012.monthly.clim.grb' + fnsnoc: '{{ workflow.FIXam }}/global_snoclim.1.875.grb' + fnzorc: '{{ workflow.FIXam }}/vegetation_type.igbp.0.05.nc' + fnaisc: '{{ workflow.FIXam }}/CFSR.SEAICE.1982.2012.monthly.clim.grb' + fnsmcc: '{{ workflow.FIXam }}/global_soilmgldas.t126.384.190.grb' + fnmskh: '{{ workflow.FIXam }}/seaice_newland.grb' + validate: true + rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' + ufs_configure: + template_file: '{{ user.PARMdir }}/ufs.configure' + template_values: + dt_atmos: '{{ task_run_fcst.fv3.model_configure.update_values.dt_atmos // 1 }}' + print_esmf: !bool '{{ task_run_fcst.PRINT_ESMF }}' + cpl_aqm: !bool '{{ cpl_aqm_parm.CPL_AQM }}' + pe_member01_m1: '{{ task_run_fcst.PE_MEMBER01 - 1 }}' + aqm_pe_member01_m1: '{{ ( task_run_fcst.LAYOUT_X * task_run_fcst.LAYOUT_Y ) - 1 }}' + atm_omp_num_threads: '{{ task_run_fcst.fv3.execution.threads }}' # # Set computational parameters for the forecast. Definitions: # @@ -1932,9 +1841,10 @@ task_run_fcst: # #----------------------------------------------------------------------- # - LAYOUT_X: '{{ task_run_fcst.LAYOUT_X }}' - LAYOUT_Y: '{{ task_run_fcst.LAYOUT_Y }}' - BLOCKSIZE: '{{ task_run_fcst.BLOCKSIZE }}' + LAYOUT_X: + LAYOUT_Y: + BLOCKSIZE: + DT_ATMOS: # #----------------------------------------------------------------------- # @@ -1950,14 +1860,6 @@ task_run_fcst: # # Definitions: # - # QUILTING: - # Flag that determines whether or not to use the write component for - # writing output files to disk. The regional grid requires the use of - # the write component, so users should not change the default value. - # When set to true, the forecast model will output files named dynf$HHH.nc - # and phyf$HHH.nc (where HHH is the 3-digit forecast hour) containing dynamics - # and physics fields, respectively, on the write-component grid. - # # PRINT_ESMF: # Flag that determines whether to output extra (debugging) information from # ESMF routines. Must be true or false. 
Note that the write @@ -2048,15 +1950,13 @@ task_run_fcst: # #----------------------------------------------------------------------- # - QUILTING: true PRINT_ESMF: false - PE_MEMBER01: '{{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST * (task_run_fcst.LAYOUT_Y * task_run_fcst.LAYOUT_X + task_run_fcst.WRTCMP_write_groups * task_run_fcst.WRTCMP_write_tasks_per_group) if task_run_fcst.QUILTING else task_run_fcst.OMP_NUM_THREADS_RUN_FCST * task_run_fcst.LAYOUT_Y * task_run_fcst.LAYOUT_X}}' + PE_MEMBER01: !int '{{ task_run_fcst.fv3.execution.threads * (task_run_fcst.LAYOUT_Y * task_run_fcst.LAYOUT_X + task_run_fcst.WRTCMP_write_groups // 1 * task_run_fcst.WRTCMP_write_tasks_per_group // 1 ) if task_run_fcst.fv3.model_configure.update_values.quilting else task_run_fcst.fv3.execution.threads * task_run_fcst.LAYOUT_Y * task_run_fcst.LAYOUT_X}}' WRTCMP_write_groups: "" WRTCMP_write_tasks_per_group: "" - WRTCMP_output_grid: "''" WRTCMP_cen_lon: "" WRTCMP_cen_lat: "" WRTCMP_lon_lwr_left: "" @@ -2082,12 +1982,6 @@ task_run_fcst: # #----------------------------------------------------------------------- # - # USE_MERRA_CLIMO: Flag that determines whether MERRA2 aerosol climatology - # data and lookup tables for optics properties are obtained - # - #----------------------------------------------------------------------- - # - USE_MERRA_CLIMO: '{{ workflow.CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km" or workflow.CCPP_PHYS_SUITE == "FV3_GFS_v17_p8" }}' # #----------------------------------------------------------------------- # @@ -2124,15 +2018,15 @@ task_run_post: update_values: model_inputs: datestr: "{{ timevars.valid_time }}" - filename: '{{ task_run_fcst.rundir }}/dynf{{ timevars.fff }}.nc' - filenameflux: '{{ task_run_fcst.rundir }}/phyf{{ timevars.fff }}.nc' + filename: '{{ task_run_fcst.fv3.rundir }}/dynf{{ timevars.fff }}.nc' + filenameflux: '{{ task_run_fcst.fv3.rundir }}/phyf{{ timevars.fff }}.nc' grib: grib2 ioform: netcdf modelname: FV3R nampgb: kpo: 47 po: [1000.,975.,950.,925.,900.,875.,850.,825.,800.,775.,750.,725.,700.,675.,650.,625.,600.,575.,550.,525.,500.,475.,450.,425.,400.,375.,350.,325.,300.,275.,250.,225.,200.,175.,150.,125.,100.,70.,50.,30.,20.,10.,7.,5.,3.,2.,1.] - rundir: '{{ task_run_fcst.rundir }}/postprd/{{ timevars.fff }}' + rundir: '{{ task_run_fcst.fv3.rundir }}/postprd/{{ timevars.fff }}' platform: account: '{{ user.ACCOUNT }}' scheduler: '{{ platform.SCHED }}' @@ -2151,6 +2045,7 @@ task_run_post: # This is a helper section to help alleviate repetitive and/or tedious syntax for working with # datetime objects. Keys map to commonly used variable names. timevars: + yyyymmdd: '{{ cycle.strftime("%Y%m%d") }}' yyyymmddhh: '{{ cycle.strftime("%Y%m%d%H") }}' hh: '{{ cycle.strftime("%H") }}' valid_time: "{{ (cycle + leadtime).strftime('%Y-%m-%d_%H:%M:%S') }}" @@ -2364,10 +2259,6 @@ global: # A list of names for the ensemble member names following the format # mem001, mem002, etc. # - # FV3_NML_ENSMEM_FPS: - # Paths to the ensemble member corresponding namelists in the - # experiment directory - # # ENS_TIME_LAG_HRS: # Time lag (in hours) to use for each ensemble member. For a deterministic # forecast, this is a one-element array. 
Default values of array elements @@ -2378,70 +2269,21 @@ global: DO_ENSEMBLE: false NUM_ENS_MEMBERS: 0 ENSMEM_NAMES: '{% for m in range(global.NUM_ENS_MEMBERS) %}{{ "mem%03d, " % m }}{% endfor %}' - FV3_NML_ENSMEM_FPS: '{% for mem in global.ENSMEM_NAMES %}{{"%s/%s_%s" % workflow.EXPTDIR, workflow.FV3_NML_FN, mem }}{% endfor %}' ENS_TIME_LAG_HRS: '[ {% for m in range([1,global.NUM_ENS_MEMBERS]|max) %} 0, {% endfor %} ]' # #----------------------------------------------------------------------- # - # Set default ad-hoc stochastic physics options. - # For detailed documentation of these parameters, see: - # https://stochastic-physics.readthedocs.io/en/ufs_public_release/namelist_options.html + # Switches for using ad-hoc stochastic physics options. + # + # For specific namelist settings associated with these switches, see + # stochastic_params.yaml # #----------------------------------------------------------------------- # + DO_SPP: false DO_SHUM: false DO_SPPT: false DO_SKEB: false - ISEED_SPPT: 1 - ISEED_SHUM: 2 - ISEED_SKEB: 3 - NEW_LSCALE: true - SHUM_MAG: 0.006 #Variable "shum" in input.nml - SHUM_LSCALE: 150000 - SHUM_TSCALE: 21600 #Variable "shum_tau" in input.nml - SHUM_INT: 3600 #Variable "shumint" in input.nml - SPPT_MAG: 0.7 #Variable "sppt" in input.nml - SPPT_LOGIT: true - SPPT_LSCALE: 150000 - SPPT_TSCALE: 21600 #Variable "sppt_tau" in input.nml - SPPT_INT: 3600 #Variable "spptint" in input.nml - SPPT_SFCLIMIT: true - SKEB_MAG: 0.5 #Variable "skeb" in input.nml - SKEB_LSCALE: 150000 - SKEB_TSCALE: 21600 #Variable "skeb_tau" in input.nml - SKEB_INT: 3600 #Variable "skebint" in input.nml - SKEBNORM: 1 - SKEB_VDOF: 10 - USE_ZMTNBLCK: false - # - #----------------------------------------------------------------------- - # - # Set default SPP stochastic physics options. - # SPP perturbs specific tuning parameters within a physics parameterization - # (unlike SPPT, which multiplies overall physics tendencies by a random - # perturbation field *after* the call to the physics suite).Patterns evolve - # and are applied at each time step. Each SPP option is an array, - # applicable (in order) to the HRRR-based parameterization listed in SPP_VAR_LIST. - # Enter each value of the array in config.yaml as shown below without commas - # or single quotes (e.g., SPP_VAR_LIST: [ "pbl" "sfc" "mp" "rad" "gwd" ] ). - # Both commas and single quotes will be added by Jinja when creating the - # namelist. - # - # Note that SPP is currently only available for specific physics schemes - # used in the RAP/HRRR physics suite. Users need to be aware of which SDF - # is chosen when turning this option on. - # - #----------------------------------------------------------------------- - # - DO_SPP: false - SPP_VAR_LIST: [ "pbl", "sfc", "mp", "rad", "gwd" ] - SPP_MAG_LIST: [ 0.2, 0.2, 0.75, 0.2, 0.2 ] #Variable "spp_prt_list" in input.nml - SPP_LSCALE: [ 150000.0, 150000.0, 150000.0, 150000.0, 150000.0 ] - SPP_TSCALE: [ 21600.0, 21600.0, 21600.0, 21600.0, 21600.0 ] #Variable "spp_tau" in input.nml - SPP_SIGTOP1: [ 0.1, 0.1, 0.1, 0.1, 0.1 ] - SPP_SIGTOP2: [ 0.025, 0.025, 0.025, 0.025, 0.025 ] - SPP_STDDEV_CUTOFF: [ 1.5, 1.5, 2.5, 1.5, 1.5 ] - ISEED_SPP: [ 4, 5, 6, 7, 8 ] # #----------------------------------------------------------------------- # @@ -2449,26 +2291,10 @@ global: # Please be aware of the SDF that you choose if you wish to turn on LSM # SPP. # - # SPP in LSM schemes is handled in the &nam_sfcperts namelist block - # instead of in &nam_sppperts, where all other SPP is implemented. 
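  #
  # For illustration: the DO_SPP/DO_SHUM/DO_SPPT/DO_SKEB switches above only
  # toggle the schemes; the per-scheme namelist values now come from
  # stochastic_params.yaml, whose contents are not part of this diff. A
  # plausible sketch of its "do_sppt" entry, assuming it simply mirrors the
  # defaults and the nam_stochy mapping removed elsewhere in this change,
  # could look like:
  #
  #   do_sppt:
  #     gfs_physics_nml:
  #       do_sppt: true
  #     nam_stochy:
  #       iseed_sppt: 1
  #       new_lscale: true
  #       sppt: 0.7
  #       sppt_logit: true
  #       sppt_lscale: 150000
  #       sppt_sfclimit: true
  #       sppt_tau: 21600
  #       spptint: 3600
  #       use_zmtnblck: false
  #
  # setup.py (later in this change) merges the entry for each enabled switch
  # into task_run_fcst.fv3.namelist.update_values.
  #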
- # # Perturbations to soil moisture content (SMC) are only applied at the # first time step. # - # LSM perturbations include SMC - soil moisture content (volume fraction), - # VGF - vegetation fraction, ALB - albedo, SAL - salinity, - # EMI - emissivity, ZOL - surface roughness (cm), and STC - soil temperature. - # - # Only five perturbations at a time can be applied currently, but all seven - # are shown below. In addition, only one unique iseed value is allowed - # at the moment, and is used for each pattern. - # - DO_LSM_SPP: false #If true, sets lndp_type=2 - LSM_SPP_TSCALE: [ 21600, 21600, 21600, 21600, 21600, 21600, 21600 ] - LSM_SPP_LSCALE: [ 150000, 150000, 150000, 150000, 150000, 150000, 150000 ] - ISEED_LSM_SPP: [ 9 ] - LSM_SPP_VAR_LIST: [ "smc", "vgf", "alb", "sal", "emi", "zol", "stc" ] - LSM_SPP_MAG_LIST: [ 0.017, 0.001, 0.001, 0.001, 0.001, 0.001, 0.2 ] + DO_LSM_SPP: false # #----------------------------------------------------------------------- # @@ -2481,16 +2307,6 @@ global: #----------------------------------------------------------------------- # HALO_BLEND: 10 - # - #----------------------------------------------------------------------- - # - # PRINT_DIFF_PGR: - # Option to turn on/off the pressure tendency diagnostic. - # - #----------------------------------------------------------------------- - # - PRINT_DIFF_PGR: false - #---------------------------- # verification (vx) parameters #----------------------------- diff --git a/ush/config_defaults_aqm.yaml b/ush/config_defaults_aqm.yaml index 928b508632..9580b98a9d 100644 --- a/ush/config_defaults_aqm.yaml +++ b/ush/config_defaults_aqm.yaml @@ -10,6 +10,50 @@ task_run_post: output_file_labels: - cmaq +task_run_fcst: + aqm: + control_file: + template_file: "{{ workflow.PARMdir }}/aqm.rc" + template_values: + do_aqm_dust: true + do_aqm_canopy: false + do_aqm_product: true + ccpp_phys_suite: "{{ workflow.CCPP_PHYS_SUITE }}" + init_concentrations: true + aqm_rc_bio_file_fp: "{{ platform.FIXaqm }}/bio/BEIS_SARC401.ncf" + fixaqm: "{{ platform.FIXaqm }}" + aqm_rc_fire_file_fp: "{{ 'COMIN' | env }}/GBBEPx_C401GRID.emissions_v003_{{ timevars.yyyymmdd }}_t{{ timevars.hh }}z.nc" + aqm_rc_fire_frequency: static + aqm_rc_dust_file_fp: "{{ platform.FIXaqm }}/dust/FENGSHA_p8_10km_inputs_{{ workflow.PREDEF_GRID_NAME }}.nc" + aqm_rc_canopy_file_fp: "{{ platform.FIXaqm }}/canopy/gfs.t12z.geo.{{ cdate.strftime('%m') }}.canopy_regrid.nc" + aqm_rc_product_fn: aqm.prod.rc + aqm_rc_product_frequency: hourly + fv3: + namelist: + update_values: + fv_core_nml: + k_split: 1 + n_split: 8 + gfs_physics_nml: + cplaqm: true + cplocn2atm: false + fscav_aero: ["aacd:0.0", "acet:0.0", "acrolein:0.0", "acro_primary:0.0", "ald2:0.0", + "ald2_primary:0.0", "aldx:0.0", "benzene:0.0", "butadiene13:0.0", "cat1:0.0", + "cl2:0.0", "clno2:0.0", "co:0.0", "cres:0.0", "cron:0.0", + "ech4:0.0", "epox:0.0", "eth:0.0", "etha:0.0", "ethy:0.0", + "etoh:0.0", "facd:0.0", "fmcl:0.0", "form:0.0", "form_primary:0.0", + "gly:0.0", "glyd:0.0", "h2o2:0.0", "hcl:0.0", "hg:0.0", + "hgiigas:0.0", "hno3:0.0", "hocl:0.0", "hono:0.0", "hpld:0.0", + "intr:0.0", "iole:0.0", "isop:0.0", "ispd:0.0", "ispx:0.0", + "ket:0.0", "meoh:0.0", "mepx:0.0", "mgly:0.0", "n2o5:0.0", + "naph:0.0", "no:0.0", "no2:0.0", "no3:0.0", "ntr1:0.0", + "ntr2:0.0", "o3:0.0", "ole:0.0", "opan:0.0", "open:0.0", + "opo3:0.0", "pacd:0.0", "pan:0.0", "panx:0.0", "par:0.0", + "pcvoc:0.0", "pna:0.0", "prpa:0.0", "rooh:0.0", "sesq:0.0", + "so2:0.0", "soaalk:0.0", "sulf:0.0", "terp:0.0", "tol:0.0", + "tolu:0.0", 
"vivpo1:0.0", "vlvoo1:0.0", "vlvoo2:0.0", "vlvpo1:0.0", + "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", + "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] task_pre_post_stat: execution: &default_aqm_execution cores: 1 diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 3f039af335..70736a7933 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -26,17 +26,15 @@ mkdir_vrfy, mv_vrfy, check_for_preexist_dir_file, - cfg_to_yaml_str, - find_pattern_in_str, flatten_dict, ) from check_python_version import check_python_version from get_crontab_contents import add_crontab_line from setup import setup -from set_fv3nml_sfc_climo_filenames import set_fv3nml_sfc_climo_filenames from uwtools.api.config import get_nml_config, get_yaml_config, realize +from uwtools.api.template import render from uwtools.api import rocoto as uwrocoto @@ -73,7 +71,7 @@ def generate_FV3LAM_wflow( # The setup function reads the user configuration file and fills in # non-user-specified values from config_defaults.yaml - expt_config = setup(ushdir,debug=debug) + expt_config = setup(ushdir, debug=debug) # # ----------------------------------------------------------------------- @@ -133,22 +131,27 @@ def generate_FV3LAM_wflow( verbose=debug, ) - with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file: + with open(wflow_launch_script_fp, "r", encoding="utf-8") as launch_script_file: launch_script_content = launch_script_file.read() # Stage an experiment-specific launch file in the experiment directory template = Template(launch_script_content) # The script needs several variables from the workflow and user sections - template_variables = {**expt_config["user"], **expt_config["workflow"], - "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])} - launch_content = template.safe_substitute(template_variables) + template_variables = { + **expt_config["user"], + **expt_config["workflow"], + "valid_vals_BOOLEAN": list_to_str( + expt_config["constants"]["valid_vals_BOOLEAN"] + ), + } + launch_content = template.safe_substitute(template_variables) launch_fp = os.path.join(exptdir, wflow_launch_script_fn) - with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn: + with open(launch_fp, "w", encoding="utf-8") as expt_launch_fn: expt_launch_fn.write(launch_content) - os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR) + os.chmod(launch_fp, os.stat(launch_fp).st_mode | S_IXUSR) # # ----------------------------------------------------------------------- @@ -168,9 +171,13 @@ def generate_FV3LAM_wflow( # pylint: disable=undefined-variable if USE_CRON_TO_RELAUNCH: - add_crontab_line(called_from_cron=False,machine=expt_config["user"]["MACHINE"], - crontab_line=expt_config["workflow"]["CRONTAB_LINE"], - exptdir=exptdir,debug=debug) + add_crontab_line( + called_from_cron=False, + machine=expt_config["user"]["MACHINE"], + crontab_line=expt_config["workflow"]["CRONTAB_LINE"], + exptdir=exptdir, + debug=debug, + ) # # Copy or symlink fix files @@ -210,7 +217,7 @@ def generate_FV3LAM_wflow( # # ----------------------------------------------------------------------- # - if USE_MERRA_CLIMO: + if expt_config["workflow"]["USE_MERRA_CLIMO"]: log_info( f""" Copying MERRA2 aerosol climatology data files from system directory @@ -230,33 +237,6 @@ def generate_FV3LAM_wflow( else: cp_vrfy(os.path.join(FIXaer, "merra2.aerclim*.nc"), FIXclim) cp_vrfy(os.path.join(FIXlut, "optics*.dat"), FIXclim) - # - # 
----------------------------------------------------------------------- - # - # Copy templates of various input files to the experiment directory. - # - # ----------------------------------------------------------------------- - # - log_info( - """ - Copying templates of various input files to the experiment directory...""", - verbose=debug, - ) - - log_info( - """ - Copying the template data table file to the experiment directory...""", - verbose=debug, - ) - cp_vrfy(DATA_TABLE_TMPL_FP, DATA_TABLE_FP) - - log_info( - """ - Copying the template field table file to the experiment directory...""", - verbose=debug, - ) - cp_vrfy(FIELD_TABLE_TMPL_FP, FIELD_TABLE_FP) - # # Copy the CCPP physics suite definition file from its location in the # clone of the FV3 code repository to the experiment directory (EXPT- @@ -282,375 +262,6 @@ def generate_FV3LAM_wflow( verbose=debug, ) cp_vrfy(FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP) - # - # ----------------------------------------------------------------------- - # - # Set parameters in the FV3-LAM namelist file. - # - # ----------------------------------------------------------------------- - # - log_info( - f""" - Setting parameters in weather model's namelist file (FV3_NML_FP): - FV3_NML_FP = '{FV3_NML_FP}'""", - verbose=debug, - ) - # - # Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. - # These need to be set in the FV3-LAM Fortran namelist file. They represent - # the number of cell vertices in the x and y directions on the regional - # grid. - # - npx = NX + 1 - npy = NY + 1 - # - # For the physics suites that use RUC LSM, set the parameter kice to 9, - # Otherwise, leave it unspecified (which means it gets set to the default - # value in the forecast model). - # - kice = None - if SDF_USES_RUC_LSM: - kice = 9 - # - # Set lsoil, which is the number of input soil levels provided in the - # chgres_cube output NetCDF file. This is the same as the parameter - # nsoill_out in the namelist file for chgres_cube. [On the other hand, - # the parameter lsoil_lsm (not set here but set in input.nml.FV3 and/or - # FV3.input.yml) is the number of soil levels that the LSM scheme in the - # forecast model will run with.] Here, we use the same approach to set - # lsoil as the one used to set nsoill_out in exregional_make_ics.sh. - # See that script for details. - # - # NOTE: - # May want to remove lsoil from FV3.input.yml (and maybe input.nml.FV3). - # Also, may want to set lsm here as well depending on SDF_USES_RUC_LSM. - # - lsoil = 4 - if EXTRN_MDL_NAME_ICS in ("HRRR", "RAP") and SDF_USES_RUC_LSM: - lsoil = 9 - if CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km": - lsoil = "" - # - # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the namelist variables that are physics- - # suite-independent need to be set to. Below, this variable will be - # passed to a python script that will in turn set the values of these - # variables in the namelist file. - # - # IMPORTANT: - # If we want a namelist variable to be removed from the namelist file, - # in the "settings" variable below, we need to set its value to the - # string "null". This is equivalent to setting its value to - # !!python/none - # in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the - # suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. - # - # It turns out that setting the variable to an empty string also works - # to remove it from the namelist! Which is better to use?? 
- # - settings = {} - settings["atmos_model_nml"] = { - "blocksize": BLOCKSIZE, - "ccpp_suite": CCPP_PHYS_SUITE, - } - - fv_core_nml_dict = {} - fv_core_nml_dict.update({ - "target_lon": LON_CTR, - "target_lat": LAT_CTR, - "nrows_blend": HALO_BLEND, - # - # Question: - # For a ESGgrid type grid, what should stretch_fac be set to? This depends - # on how the FV3 code uses the stretch_fac parameter in the namelist file. - # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) - # to something like 0.9999, but is it ok to set it to that here in the - # FV3 namelist file? - # - "stretch_fac": STRETCH_FAC, - "npx": npx, - "npy": npy, - "layout": [LAYOUT_X, LAYOUT_Y], - "bc_update_interval": LBC_SPEC_INTVL_HRS, - }) - if CCPP_PHYS_SUITE == "FV3_GFS_v15p2": - if CPL_AQM: - fv_core_nml_dict.update({ - "dnats": 5 - }) - else: - fv_core_nml_dict.update({ - "dnats": 1 - }) - elif CCPP_PHYS_SUITE == "FV3_GFS_v16": - if CPL_AQM: - fv_core_nml_dict.update({ - "hord_tr": 8, - "dnats": 5, - "nord": 2 - }) - else: - fv_core_nml_dict.update({ - "dnats": 1 - }) - elif CCPP_PHYS_SUITE == "FV3_GFS_v17_p8": - if CPL_AQM: - fv_core_nml_dict.update({ - "dnats": 4 - }) - else: - fv_core_nml_dict.update({ - "dnats": 0 - }) - - settings["fv_core_nml"] = fv_core_nml_dict - - gfs_physics_nml_dict = {} - gfs_physics_nml_dict.update({ - "kice": kice or None, - "lsoil": lsoil or None, - "print_diff_pgr": PRINT_DIFF_PGR, - }) - - if CPL_AQM: - gfs_physics_nml_dict.update({ - "cplaqm": True, - "cplocn2atm": False, - "fscav_aero": [ - "aacd:0.0", "acet:0.0", "acrolein:0.0", "acro_primary:0.0", "ald2:0.0", - "ald2_primary:0.0", "aldx:0.0", "benzene:0.0", "butadiene13:0.0", "cat1:0.0", - "cl2:0.0", "clno2:0.0", "co:0.0", "cres:0.0", "cron:0.0", - "ech4:0.0", "epox:0.0", "eth:0.0", "etha:0.0", "ethy:0.0", - "etoh:0.0", "facd:0.0", "fmcl:0.0", "form:0.0", "form_primary:0.0", - "gly:0.0", "glyd:0.0", "h2o2:0.0", "hcl:0.0", "hg:0.0", - "hgiigas:0.0", "hno3:0.0", "hocl:0.0", "hono:0.0", "hpld:0.0", - "intr:0.0", "iole:0.0", "isop:0.0", "ispd:0.0", "ispx:0.0", - "ket:0.0", "meoh:0.0", "mepx:0.0", "mgly:0.0", "n2o5:0.0", - "naph:0.0", "no:0.0", "no2:0.0", "no3:0.0", "ntr1:0.0", - "ntr2:0.0", "o3:0.0", "ole:0.0", "opan:0.0", "open:0.0", - "opo3:0.0", "pacd:0.0", "pan:0.0", "panx:0.0", "par:0.0", - "pcvoc:0.0", "pna:0.0", "prpa:0.0", "rooh:0.0", "sesq:0.0", - "so2:0.0", "soaalk:0.0", "sulf:0.0", "terp:0.0", "tol:0.0", - "tolu:0.0", "vivpo1:0.0", "vlvoo1:0.0", "vlvoo2:0.0", "vlvpo1:0.0", - "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", - "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] - }) - settings["gfs_physics_nml"] = gfs_physics_nml_dict - - # - # Add to "settings" the values of those namelist variables that specify - # the paths to fixed files in the FIXam directory. As above, these namelist - # variables are physcs-suite-independent. - # - # Note that the array FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING contains - # the mapping between the namelist variables and the names of the files - # in the FIXam directory. Here, we loop through this array and process - # each element to construct each line of "settings". 
- # - dummy_run_dir = os.path.join(EXPTDIR, "any_cyc") - if DO_ENSEMBLE: - dummy_run_dir = os.path.join(dummy_run_dir, "any_ensmem") - - regex_search = "^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" - num_nml_vars = len(FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING) - namsfc_dict = {} - for i in range(num_nml_vars): - - mapping = f"{FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING[i]}" - tup = find_pattern_in_str(regex_search, mapping) - nml_var_name = tup[0] - FIXam_fn = tup[1] - - fp = '""' - if FIXam_fn: - fp = os.path.join(FIXam, FIXam_fn) - # - # If not in NCO mode, for portability and brevity, change fp so that it - # is a relative path (relative to any cycle directory immediately under - # the experiment directory). - # - if RUN_ENVIR != "nco": - fp = os.path.relpath(os.path.realpath(fp), start=dummy_run_dir) - # - # Add a line to the variable "settings" that specifies (in a yaml-compliant - # format) the name of the current namelist variable and the value it should - # be set to. - # - namsfc_dict[nml_var_name] = fp - # - # Add namsfc_dict to settings - # - settings["namsfc"] = namsfc_dict - # - # Use netCDF4 when running the North American 3-km domain due to file size. - # - if PREDEF_GRID_NAME == "RRFS_NA_3km": - settings["fms2_io_nml"] = {"netcdf_default_format": "netcdf4"} - - settings_str = cfg_to_yaml_str(settings) - - log_info( - """ - The variable 'settings' specifying values of the weather model's - namelist variables has been set as follows:\n""", - verbose=debug, - ) - log_info("\nsettings =\n\n" + settings_str, verbose=debug) - # - # ----------------------------------------------------------------------- - # - # Create a new FV3 namelist file - # - # ----------------------------------------------------------------------- - # - - physics_cfg = get_yaml_config(FV3_NML_YAML_CONFIG_FP) - base_namelist = get_nml_config(FV3_NML_BASE_SUITE_FP) - base_namelist.update_from(physics_cfg[CCPP_PHYS_SUITE]) - base_namelist.update_from(settings) - for sect, values in base_namelist.copy().items(): - if not values: - del base_namelist[sect] - continue - for k, v in values.copy().items(): - if v is None: - del base_namelist[sect][k] - base_namelist.dump(Path(FV3_NML_FP)) - # - # If not running the TN_MAKE_GRID task (which implies the workflow will - # use pregenerated grid files), set the namelist variables specifying - # the paths to surface climatology files. These files are located in - # (or have symlinks that point to them) in the FIXlam directory. - # - # Note that if running the TN_MAKE_GRID task, this action usually cannot - # be performed here but must be performed in that task because the names - # of the surface climatology files depend on the CRES parameter (which is - # the C-resolution of the grid), and this parameter is in most workflow - # configurations is not known until the grid is created. - # - if not expt_config['rocoto']['tasks'].get('task_make_grid'): - - set_fv3nml_sfc_climo_filenames(flatten_dict(expt_config), debug) - - # - # ----------------------------------------------------------------------- - # - # Add the relevant tendency-based stochastic physics namelist variables to - # "settings" when running with SPPT, SHUM, or SKEB turned on. If running - # with SPP or LSM SPP, set the "new_lscale" variable. Otherwise only - # include an empty "nam_stochy" stanza. 
- # - # ----------------------------------------------------------------------- - # - settings = {} - settings["gfs_physics_nml"] = { - "do_shum": DO_SHUM, - "do_sppt": DO_SPPT, - "do_skeb": DO_SKEB, - "do_spp": DO_SPP, - "n_var_spp": N_VAR_SPP, - "n_var_lndp": N_VAR_LNDP, - "lndp_type": LNDP_TYPE, - "fhcyc": FHCYC_LSM_SPP_OR_NOT, - } - nam_stochy_dict = {} - if DO_SPPT: - nam_stochy_dict.update( - { - "iseed_sppt": ISEED_SPPT, - "new_lscale": NEW_LSCALE, - "sppt": SPPT_MAG, - "sppt_logit": SPPT_LOGIT, - "sppt_lscale": SPPT_LSCALE, - "sppt_sfclimit": SPPT_SFCLIMIT, - "sppt_tau": SPPT_TSCALE, - "spptint": SPPT_INT, - "use_zmtnblck": USE_ZMTNBLCK, - } - ) - - if DO_SHUM: - nam_stochy_dict.update( - { - "iseed_shum": ISEED_SHUM, - "new_lscale": NEW_LSCALE, - "shum": SHUM_MAG, - "shum_lscale": SHUM_LSCALE, - "shum_tau": SHUM_TSCALE, - "shumint": SHUM_INT, - } - ) - - if DO_SKEB: - nam_stochy_dict.update( - { - "iseed_skeb": ISEED_SKEB, - "new_lscale": NEW_LSCALE, - "skeb": SKEB_MAG, - "skeb_lscale": SKEB_LSCALE, - "skebnorm": SKEBNORM, - "skeb_tau": SKEB_TSCALE, - "skebint": SKEB_INT, - "skeb_vdof": SKEB_VDOF, - } - ) - - if DO_SPP or DO_LSM_SPP: - nam_stochy_dict.update({"new_lscale": NEW_LSCALE}) - - settings["nam_stochy"] = nam_stochy_dict - # - # Add the relevant SPP namelist variables to "settings" when running with - # SPP turned on. Otherwise only include an empty "nam_sppperts" stanza. - # - nam_sppperts_dict = {} - if DO_SPP: - nam_sppperts_dict = { - "iseed_spp": ISEED_SPP, - "spp_lscale": SPP_LSCALE, - "spp_prt_list": SPP_MAG_LIST, - "spp_sigtop1": SPP_SIGTOP1, - "spp_sigtop2": SPP_SIGTOP2, - "spp_stddev_cutoff": SPP_STDDEV_CUTOFF, - "spp_tau": SPP_TSCALE, - "spp_var_list": SPP_VAR_LIST, - } - - settings["nam_sppperts"] = nam_sppperts_dict - # - # Add the relevant LSM SPP namelist variables to "settings" when running with - # LSM SPP turned on. - # - nam_sfcperts_dict = {} - if DO_LSM_SPP: - nam_sfcperts_dict = { - "lndp_type": LNDP_TYPE, - "lndp_model_type": LNDP_MODEL_TYPE, - "lndp_tau": LSM_SPP_TSCALE, - "lndp_lscale": LSM_SPP_LSCALE, - "iseed_lndp": ISEED_LSM_SPP, - "lndp_var_list": LSM_SPP_VAR_LIST, - "lndp_prt_list": LSM_SPP_MAG_LIST, - } - - settings["nam_sfcperts"] = nam_sfcperts_dict - - settings_str = cfg_to_yaml_str(settings) - # - #----------------------------------------------------------------------- - # - # Generate namelist files with stochastic physics if needed - # - #----------------------------------------------------------------------- - # - if any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): - realize( - input_config=FV3_NML_FP, - input_format="nml", - output_file=FV3_NML_STOCH_FP, - output_format="nml", - update_config=get_nml_config(settings), - ) # # ----------------------------------------------------------------------- @@ -718,7 +329,9 @@ def generate_FV3LAM_wflow( return EXPTDIR -def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> None: +def setup_logging( + logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False +) -> None: """ Sets up logging, printing high-priority (INFO and higher) messages to screen and printing all messages with detailed timing and routine info in the specified text file. 
If ``debug = True``, @@ -735,7 +348,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals formatter = logging.Formatter("%(name)-22s %(levelname)-8s %(message)s") - fh = logging.FileHandler(logfile, mode='w') + fh = logging.FileHandler(logfile, mode="w") fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logging.getLogger().addHandler(fh) @@ -758,13 +371,18 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals if __name__ == "__main__": - #Parse arguments + # Parse arguments parser = argparse.ArgumentParser( - description="Script for setting up a forecast and creating a workflow"\ - "according to the parameters specified in the config file\n") + description="Script for setting up a forecast and creating a workflow" + "according to the parameters specified in the config file\n" + ) - parser.add_argument('-d', '--debug', action='store_true', - help='Script will be run in debug mode with more verbose output') + parser.add_argument( + "-d", + "--debug", + action="store_true", + help="Script will be run in debug mode with more verbose output", + ) pargs = parser.parse_args() USHdir = os.path.dirname(os.path.abspath(__file__)) @@ -774,7 +392,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals # experiment/workflow. try: expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile, pargs.debug) - except: # pylint: disable=bare-except + except: # pylint: disable=bare-except logging.exception( dedent( f""" diff --git a/ush/link_fix.py b/ush/link_fix.py index f17597125d..187e68740c 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -413,7 +413,7 @@ def _parse_args(argv): target_dir=cfg["workflow"]["FIXlam"], ccpp_phys_suite=cfg["workflow"]["CCPP_PHYS_SUITE"], constants=cfg["constants"], - dot_or_uscore=cfg["workflow"]["DOT_OR_USCORE"], + dot_or_uscore="_", nhw=cfg["grid_params"]["NHW"], run_task=True, sfc_climo_fields=cfg["fixed_files"]["SFC_CLIMO_FIELDS"], diff --git a/ush/machine/gaea.yaml b/ush/machine/gaea.yaml index 5e0d0e54e4..66bcfcc989 100644 --- a/ush/machine/gaea.yaml +++ b/ush/machine/gaea.yaml @@ -48,5 +48,5 @@ rocoto: metatask_run_ensemble: task_run_fcst_mem#mem#: cores: !remove - native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.fv3.execution.threads // 1 }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ task_run_fcst.fv3.execution.batchargs.nodes }}:ppn={{ task_run_fcst.fv3.execution.batchargs.tasks_per_node }}' diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index edf852606b..6ecb64b041 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -50,5 +50,4 @@ rocoto: metatask_run_ensemble: task_run_fcst_mem#mem#: cores: !remove - native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.fv3.execution.threads }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' diff --git a/ush/machine/hercules.yaml b/ush/machine/hercules.yaml index f0e47c594f..9c5bb63b04 100644 --- a/ush/machine/hercules.yaml +++ b/ush/machine/hercules.yaml @@ -57,5 +57,5 @@ rocoto: metatask_run_ensemble: task_run_fcst_mem#mem#: cores: !remove - native: '--cpus-per-task {{ 
task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.fv3.execution.threads }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ task_run_fcst.fv3.execution.batchargs.nodes }}:ppn={{ task_run_fcst.fv3.execution.batchargs.tasks_per_node }}' diff --git a/ush/machine/jet.yaml b/ush/machine/jet.yaml index 1a2d67784b..0b593c149c 100644 --- a/ush/machine/jet.yaml +++ b/ush/machine/jet.yaml @@ -50,6 +50,5 @@ rocoto: metatask_run_ensemble: task_run_fcst_mem#mem#: cores: !remove - native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' - + native: '--cpus-per-task {{ task_run_fcst.fv3.execution.threads }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ task_run_fcst.fv3.execution.batchargs.nodes }}:ppn={{ task_run_fcst.fv3.execution.batchargs.tasks_per_node }}' diff --git a/ush/machine/linux.yaml b/ush/machine/linux.yaml index 9058f1d494..5ee68bad0f 100644 --- a/ush/machine/linux.yaml +++ b/ush/machine/linux.yaml @@ -34,5 +34,5 @@ rocoto: metatask_run_ensemble: task_run_fcst_mem#mem#: cores: !int '{{ task_run_fcst.PE_MEMBER01 // 1 }}' - native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive' + native: '--cpus-per-task {{ task_run_fcst.fv3.execution.threads }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' nodes: !remove diff --git a/ush/machine/orion.yaml b/ush/machine/orion.yaml index 65feccfacb..8581ec8832 100644 --- a/ush/machine/orion.yaml +++ b/ush/machine/orion.yaml @@ -56,5 +56,5 @@ rocoto: metatask_run_ensemble: task_run_fcst_mem#mem#: cores: !remove - native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.fv3.execution.threads }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ task_run_fcst.fv3.execution.batchargs.nodes }}:ppn={{ task_run_fcst.fv3.execution.batchargs.tasks_per_node }}' diff --git a/ush/machine/wcoss2.yaml b/ush/machine/wcoss2.yaml index 849562e3b0..1fb953a749 100644 --- a/ush/machine/wcoss2.yaml +++ b/ush/machine/wcoss2.yaml @@ -54,7 +54,7 @@ rocoto: task_run_fcst_mem#mem#: cores: !remove native: '{{ platform.SCHED_NATIVE_CMD }}' - nodes: '{{ nnodes }}:ppn={{ ppn }}:tpp={{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST }}' + nodes: '{{ task_run_fcst.fv3.execution.batchargs.nodes }}:ppn={{ task_run_fcst.fv3.execution.batchargs.tasks_per_node }}:tpp={{ task_run_fcst.fv3.execution.threads }}' nodesize: '{{ platform.NCORES_PER_NODE }}' metatask_nexus_emission: task_nexus_emission_#nspt#: diff --git a/ush/quilting.yaml b/ush/quilting.yaml new file mode 100644 index 0000000000..96c937f4fd --- /dev/null +++ b/ush/quilting.yaml @@ -0,0 +1,49 @@ + + +no_quilting: + task_run_fcst: + PE_MEMBER01: !int '{{ task_run_fcst.LAYOUT_Y * task_run_fcst.LAYOUT_X }}' + fv3: + model_configure: + update_values: + write_groups: !remove + write_tasks_per_group: !remove + num_files: !remove + filename_base: !remove + output_file: !remove + output_fh: !remove + nsout: !remove + output_grid: !remove + +lambert_conformal: + task_run_fcst: + fv3: + model_configure: + update_values: + cen_lon: !float '{{ task_run_fcst.WRTCMP_cen_lon 
}}' + cen_lat: !float '{{ task_run_fcst.WRTCMP_cen_lat }}' + stdlat1: !float '{{ task_run_fcst.WRTCMP_stdlat1 }}' + stdlat2: !float '{{ task_run_fcst.WRTCMP_stdlat2 }}' + nx: !int '{{ task_run_fcst.WRTCMP_nx }}' + ny: !int '{{ task_run_fcst.WRTCMP_ny }}' + lon1: !float '{{ task_run_fcst.WRTCMP_lon_lwr_left }}' + lat1: !float '{{ task_run_fcst.WRTCMP_lat_lwr_left }}' + dx: !float '{{ task_run_fcst.WRTCMP_dx }}' + dy: !float '{{ task_run_fcst.WRTCMP_dy }}' + +regional_latlon: + task_run_fcst: &latlon_reference + fv3: + model_configure: + update_values: + cen_lon: !float '{{ task_run_fcst.WRTCMP_cen_lon }}' + cen_lat: !float '{{ task_run_fcst.WRTCMP_cen_lat }}' + lon1: !float '{{ task_run_fcst.WRTCMP_lon_lwr_left }}' + lat1: !float '{{ task_run_fcst.WRTCMP_lat_lwr_left }}' + lon2: !float '{{ task_run_fcst.WRTCMP_lon_upr_rght }}' + lat2: !float '{{ task_run_fcst.WRTCMP_lat_upr_rght }}' + dlon: !float '{{ task_run_fcst.WRTCMP_dlon }}' + dlat: !float '{{ task_run_fcst.WRTCMP_dlat }}' + +rotated_latlon: + <<: *latlon_reference diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py deleted file mode 100644 index 7cc9d457f5..0000000000 --- a/ush/set_fv3nml_sfc_climo_filenames.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env python3 - -""" -Update filenames for surface climotology files in the namelist. -""" - -import argparse -import os -import re -import sys -from textwrap import dedent - -from python_utils import ( - cfg_to_yaml_str, - check_var_valid_value, - flatten_dict, - import_vars, - print_info_msg, -) - -from uwtools.api.config import get_nml_config, get_yaml_config, realize - - -VERBOSE = os.environ.get("VERBOSE", "true") - -NEEDED_VARS = [ - "CRES", - "DO_ENSEMBLE", - "EXPTDIR", - "FIXlam", - "FV3_NML_FP", - "PARMdir", - "RUN_ENVIR", - ] - - -# pylint: disable=undefined-variable - -def set_fv3nml_sfc_climo_filenames(config, debug=False): - """ - Sets the values of the variables in the forecast model's namelist file that specify the paths - to the surface climatology files on the FV3LAM native grid (which are either pregenerated - or created by the ``make_sfc_climo`` task). Note that the workflow - generation scripts create symlinks to these surface climatology files - in the ``FIXlam`` directory, and the values in the namelist file that get - set by this function are relative or full paths to these links. - - Args: - config (dict): Section of configuration file specifying surface climatology fields - (as a flattened dictionary) - debug (bool): Enable extra output for debugging - Returns: - None - """ - - import_vars(dictionary=config, env_vars=NEEDED_VARS) - - fixed_cfg = get_yaml_config(os.path.join(PARMdir, "fixed_files_mapping.yaml"))["fixed_files"] - - # The regular expression regex_search set below will be used to extract - # from the elements of the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING - # the name of the namelist variable to set and the corresponding surface - # climatology field from which to form the name of the surface climatology file - regex_search = "^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" - - # Set the suffix of the surface climatology files. 
- suffix = "tileX.nc" - - # create yaml-compliant string - settings = {} - - dummy_run_dir = os.path.join(EXPTDIR, "any_cyc") - if DO_ENSEMBLE == "TRUE": - dummy_run_dir += os.sep + "any_ensmem" - - namsfc_dict = {} - for mapping in fixed_cfg["FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING"]: - nml_var_name, sfc_climo_field_name = re.search(regex_search, mapping).groups() - - check_var_valid_value(sfc_climo_field_name, fixed_cfg["SFC_CLIMO_FIELDS"]) - - file_path = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") - if RUN_ENVIR != "nco": - file_path = os.path.relpath(os.path.realpath(file_path), start=dummy_run_dir) - - namsfc_dict[nml_var_name] = file_path - - settings["namsfc_dict"] = namsfc_dict - settings_str = cfg_to_yaml_str(settings) - - print_info_msg( - dedent( - f""" - The variable 'settings' specifying values of the namelist variables - has been set as follows:\n - settings = - - {settings_str} - """ - ), - verbose=debug, - ) - - realize( - input_config=FV3_NML_FP, - input_format="nml", - output_file=FV3_NML_FP, - output_format="nml", - update_config=get_nml_config(settings), - ) - -def _parse_args(argv): - """Parse command line arguments""" - parser = argparse.ArgumentParser(description="Set surface climatology fields.") - - parser.add_argument( - "-p", - "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", - ) - parser.add_argument('-d', '--debug', action='store_true', - help='Script will be run in debug mode with more verbose output') - - return parser.parse_args(argv) - - -if __name__ == "__main__": - args = _parse_args(sys.argv[1:]) - cfg = get_yaml_config(args.path_to_defns) - cfg = flatten_dict(cfg) - set_fv3nml_sfc_climo_filenames(cfg, args.debug) diff --git a/ush/setup.py b/ush/setup.py index 616e0d4492..809f2ff7a0 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -2,6 +2,7 @@ import copy import datetime +import glob import logging import json import os @@ -12,7 +13,7 @@ from textwrap import dedent import yaml -from uwtools.api.config import get_yaml_config +from uwtools.api.config import get_nml_config, get_yaml_config from link_fix import link_fix from python_utils import ( @@ -48,6 +49,7 @@ from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid from uwtools.api.config import get_yaml_config, validate + def load_config_for_setup(ushdir, default_config_path, user_config_path): """Load in the default, machine, and user configuration files into Python dictionaries. Return the combined experiment dictionary. 
@@ -96,7 +98,9 @@ def load_config_for_setup(ushdir, default_config_path, user_config_path): errmsg = f"Invalid key(s) specified in {user_config}:\n" for entry in invalid: errmsg = errmsg + f"{entry} = {invalid[entry]}\n" - errmsg = errmsg + f"\nCheck {default_config} for allowed user-specified variables\n" + errmsg = ( + errmsg + f"\nCheck {default_config} for allowed user-specified variables\n" + ) raise Exception(errmsg) # Mandatory variables *must* be set in the user's config; the default value is invalid @@ -134,7 +138,6 @@ def load_config_for_setup(ushdir, default_config_path, user_config_path): # Load the constants file constants = get_yaml_config(ushdir / "constants.yaml") - # Load the rocoto workflow default file default_workflow = Path(ushdir).parent / "parm" / "wflow" / "default_workflow.yaml" workflow_config = get_yaml_config(default_workflow) @@ -152,8 +155,27 @@ def load_config_for_setup(ushdir, default_config_path, user_config_path): # Load CCPP suite-specific settings ccpp_suite = default_config['workflow']['CCPP_PHYS_SUITE'] ccpp_config = get_yaml_config(ushdir / "ccpp_suites_defaults.yaml").get(ccpp_suite, {}) + + # Create a temporary section containing the physics-based namelist settings + srw_base_file = Path(ushdir).parent / "parm" / "input.nml.FV3" + srw_nml = get_nml_config(srw_base_file) + if ccpp_config: + # Check to see if there are namelist updates + try: + nml_config = ccpp_config["fv3_namelist_settings"] + except KeyError: + logging.info(f"No updates for the namelist for suite {ccpp_suite}") + nml_config = {} + + nml_config = get_nml_config(nml_config) + srw_nml.update_from(nml_config) + default_config.update_from({"fv3_namelist_settings": get_yaml_config(srw_nml.data).data}) + + # Apply any suite changes not related to the model namelist + ccpp_config.pop("fv3_namelist_settings", None) default_config.update_from(ccpp_config) + # Load external model-specific settings external_cfg = get_yaml_config(ushdir / "external_model_defaults.yaml") for bcs in ("ics", "lbcs"): @@ -164,6 +186,14 @@ def load_config_for_setup(ushdir, default_config_path, user_config_path): {bcs_task: external_cfg.get(external_model, {}).get(bcs_task, {}) } ) + # Load stochastic physics params + stochastic_params = get_yaml_config(Path(ushdir, "stochastic_params.yaml")) + fcst_config = default_config["task_run_fcst"]["fv3"] + fcst_nml_config = get_yaml_config(fcst_config["namelist"]["update_values"]) + for switch_name in ("do_spp", "do_sppt", "do_shum", "do_skeb", "do_lsm_spp"): + if default_config["global"][switch_name.upper()]: + fcst_nml_config.update_from(stochastic_params.get(switch_name)) + # Set "Home" directory, the top-level ufs-srweather-app directory homedir = Path(__file__).parent.parent.resolve() default_config["user"]["HOMEdir"] = str(homedir) @@ -201,7 +231,6 @@ def load_config_for_setup(ushdir, default_config_path, user_config_path): def set_srw_paths(ushdir, expt_config): - """ Generates a dictionary of directories that describe the SRW App structure, i.e., where the SRW App is installed and the paths to @@ -359,7 +388,6 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): fcst_len_hrs_max = {fcst_len_hrs_max}""" ) - # # ----------------------------------------------------------------------- # @@ -423,14 +451,15 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): # ----------------------------------------------------------------------- # - rocoto_config = expt_config.get('rocoto', {}) + platform_config = expt_config["platform"] + 
rocoto_config = expt_config.get("rocoto", {}) rocoto_tasks = rocoto_config.get("tasks") - run_make_grid = rocoto_tasks.get('task_make_grid') is not None - run_make_orog = rocoto_tasks.get('task_make_orog') is not None - run_make_sfc_climo = rocoto_tasks.get('task_make_sfc_climo') is not None + run_make_grid = rocoto_tasks.get("task_make_grid") is not None + run_make_orog = rocoto_tasks.get("task_make_orog") is not None + run_make_sfc_climo = rocoto_tasks.get("task_make_sfc_climo") is not None # Necessary tasks are turned on - pregen_basedir = expt_config["platform"].get("DOMAIN_PREGEN_BASEDIR") + pregen_basedir = platform_config.get("DOMAIN_PREGEN_BASEDIR") if pregen_basedir is None and not ( run_make_grid and run_make_orog and run_make_sfc_climo ): @@ -444,13 +473,13 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): ) # A batch system account is specified - if expt_config["platform"].get("WORKFLOW_MANAGER") is not None: + if platform_config.get("WORKFLOW_MANAGER") is not None: if not expt_config.get("user").get("ACCOUNT"): raise Exception( dedent( f""" ACCOUNT must be specified in config or machine file if using a workflow manager. - WORKFLOW_MANAGER = {expt_config["platform"].get("WORKFLOW_MANAGER")}\n""" + WORKFLOW_MANAGER = {platform_config.get("WORKFLOW_MANAGER")}\n""" ) ) @@ -467,12 +496,12 @@ def _remove_tag(tasks, tag): _remove_tag(task_settings, tag) # Remove all memory tags for platforms that do not support them - remove_memory = expt_config["platform"].get("REMOVE_MEMORY") + remove_memory = platform_config.get("REMOVE_MEMORY") if remove_memory: _remove_tag(rocoto_tasks, "memory") - for part in ['PARTITION_HPSS', 'PARTITION_DEFAULT', 'PARTITION_FCST']: - partition = expt_config["platform"].get(part) + for part in ["PARTITION_HPSS", "PARTITION_DEFAULT", "PARTITION_FCST"]: + partition = platform_config.get(part) if not partition: _remove_tag(rocoto_tasks, 'partition') @@ -492,29 +521,37 @@ def _remove_tag(tasks, tag): vx_metatasks_all = {} vx_fields_all["CCPA"] = ["APCP"] - vx_metatasks_all["CCPA"] = ["metatask_PcpCombine_obs", - "metatask_PcpCombine_fcst_APCP_all_accums_all_mems", - "metatask_GridStat_CCPA_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_CCPA", - "metatask_GridStat_CCPA_ensmeanprob_all_accums"] + vx_metatasks_all["CCPA"] = [ + "metatask_PcpCombine_obs", + "metatask_PcpCombine_fcst_APCP_all_accums_all_mems", + "metatask_GridStat_CCPA_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_CCPA", + "metatask_GridStat_CCPA_ensmeanprob_all_accums", + ] vx_fields_all["NOHRSC"] = ["ASNOW"] - vx_metatasks_all["NOHRSC"] = ["task_get_obs_nohrsc", - "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems", - "metatask_GridStat_NOHRSC_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_NOHRSC", - "metatask_GridStat_NOHRSC_ensmeanprob_all_accums"] + vx_metatasks_all["NOHRSC"] = [ + "task_get_obs_nohrsc", + "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems", + "metatask_GridStat_NOHRSC_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_NOHRSC", + "metatask_GridStat_NOHRSC_ensmeanprob_all_accums", + ] vx_fields_all["MRMS"] = ["REFC", "RETOP"] - vx_metatasks_all["MRMS"] = ["metatask_GridStat_MRMS_all_mems", - "metatask_GenEnsProd_EnsembleStat_MRMS", - "metatask_GridStat_MRMS_ensprob"] + vx_metatasks_all["MRMS"] = [ + "metatask_GridStat_MRMS_all_mems", + "metatask_GenEnsProd_EnsembleStat_MRMS", + "metatask_GridStat_MRMS_ensprob", + ] vx_fields_all["NDAS"] = ["ADPSFC", "ADPUPA"] - vx_metatasks_all["NDAS"] = ["task_run_MET_Pb2nc_obs", - 
"metatask_PointStat_NDAS_all_mems", - "metatask_GenEnsProd_EnsembleStat_NDAS", - "metatask_PointStat_NDAS_ensmeanprob"] + vx_metatasks_all["NDAS"] = [ + "task_run_MET_Pb2nc_obs", + "metatask_PointStat_NDAS_all_mems", + "metatask_GenEnsProd_EnsembleStat_NDAS", + "metatask_PointStat_NDAS_ensmeanprob", + ] # Get the vx fields specified in the experiment configuration. vx_fields_config = expt_config["verification"]["VX_FIELDS"] @@ -523,23 +560,27 @@ def _remove_tag(tasks, tag): # for all observation types. if not vx_fields_config: metatask = "metatask_check_post_output_all_mems" - rocoto_config['tasks'].pop(metatask) + rocoto_config["tasks"].pop(metatask) # If for a given obstype no fields are specified, remove all vx metatasks # for that obstype. for obstype in vx_fields_all: - vx_fields_obstype = [field for field in vx_fields_config if field in vx_fields_all[obstype]] + vx_fields_obstype = [ + field for field in vx_fields_config if field in vx_fields_all[obstype] + ] if not vx_fields_obstype: for metatask in vx_metatasks_all[obstype]: - if metatask in rocoto_config['tasks']: - logging.info(dedent( - f""" + if metatask in rocoto_config["tasks"]: + logging.info( + dedent( + f""" Removing verification [meta]task "{metatask}" from workflow since no fields belonging to observation type "{obstype}" are specified for verification.""" - )) - rocoto_config['tasks'].pop(metatask) + ) + ) + rocoto_config["tasks"].pop(metatask) # # ----------------------------------------------------------------------- @@ -605,7 +646,6 @@ def _get_location(xcs, fmt, expt_cfg): {data_key} = \"{basedir}\"''' ) - # Make sure the vertical coordinate file for both make_lbcs and # make_ics is the same. vcoord_files = {} @@ -637,28 +677,32 @@ def _get_location(xcs, fmt, expt_cfg): fcst_config = expt_config["task_run_fcst"] grid_config = expt_config["task_make_grid"] + ccpp_suite = workflow_config["CCPP_PHYS_SUITE"] # Warn if user has specified a large timestep inappropriately hires_ccpp_suites = ["FV3_RRFS_v1beta", "FV3_WoFS_v0", "FV3_HRRR"] - if workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites: + if ccpp_suite in hires_ccpp_suites: dt = fcst_config.get("DT_ATMOS") if dt: if dt > 40: - logger.warning(dedent( - f""" - WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short + logger.warning( + dedent( + f""" + WARNING: CCPP suite {ccpp_suite} requires short time step regardless of grid resolution. The user-specified value DT_ATMOS = {fcst_config.get("DT_ATMOS")} may result in CFL violations or other errors! 
""" - )) + ) + ) + quilting = fcst_config["fv3"]["model_configure"]["update_values"]["quilting"] # Gather the pre-defined grid parameters, if needed - if workflow_config.get("PREDEF_GRID_NAME"): + if predef_grid_name := (workflow_config.get("PREDEF_GRID_NAME")): grid_params = set_predef_grid_params( USHdir, workflow_config["PREDEF_GRID_NAME"], - fcst_config["QUILTING"], + quilting, ) # Users like to change these variables, so don't overwrite them special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] @@ -671,27 +715,47 @@ def _get_location(xcs, fmt, expt_cfg): continue # DT_ATMOS needs special treatment based on CCPP suite elif param == "DT_ATMOS": - if workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites and grid_params[param] > 40: - logger.warning(dedent( - f""" - WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short + if ( + ccpp_suite in hires_ccpp_suites + and grid_params[param] > 40 + ): + logger.warning( + dedent( + f""" + WARNING: CCPP suite {ccpp_suite} requires short time step regardless of grid resolution; setting DT_ATMOS to 40.\n This value can be overwritten in the user config file. """ - )) + ) + ) fcst_config[param] = 40 else: fcst_config[param] = value else: fcst_config[param] = value elif param.startswith("WRTCMP"): - if fcst_config.get(param).strip("'") == "": + if fcst_config.get(param, "").strip("'") == "": fcst_config[param] = value elif param == "GRID_GEN_METHOD": workflow_config[param] = value else: grid_config[param] = value + # This logic belongs in predef_grid_params.yaml once merged with make_grid integration. + if predef_grid_name == "RRFS_NA_3km": + fv3_namelist = fcst_config["fv3"]["namelist"] + fv3_namlelist["update_values"]["fms2_io_nml"][ + "netcdf_default_format" + ] = "netcdf4" + + # Load model write component grid settings + quilting_cfg = get_yaml_config(Path(USHdir, "quilting.yaml")) + if not quilting: + update_dict(quilting_cfg["no_quilting"], expt_config) + else: + write_grid = fcst_config["WRTCMP_output_grid"] + update_dict(quilting_cfg[write_grid], expt_config) + run_envir = expt_config["user"].get("RUN_ENVIR", "") fcst_len_hrs = workflow_config.get("FCST_LEN_HRS") @@ -707,19 +771,19 @@ def _get_location(xcs, fmt, expt_cfg): if 24 / incr_cycl_freq != len(fcst_len_cycl): # Also allow for the possibility that the user is running # cycles for less than a day: - num_cycles = len(set_cycle_dates( - date_first_cycl, - date_last_cycl, - incr_cycl_freq)) + num_cycles = len( + set_cycle_dates(date_first_cycl, date_last_cycl, incr_cycl_freq) + ) if num_cycles != len(fcst_len_cycl): - logger.error(f""" The number of entries in FCST_LEN_CYCL does + logger.error( + f""" The number of entries in FCST_LEN_CYCL does not divide evenly into a 24 hour day or the number of cycles in your experiment! FCST_LEN_CYCL = {fcst_len_cycl} """ - ) - raise ValueError + ) + raise ValueError # Build cycledef entries for the long forecasts # Short forecast cycles will be relevant to all intended @@ -733,7 +797,7 @@ def _get_location(xcs, fmt, expt_cfg): # Find the entries that match the long forecast, and map them to # their time of day. 
long_fcst_len = max(fcst_len_cycl) - long_indices = [i for i,x in enumerate(fcst_len_cycl) if x == long_fcst_len] + long_indices = [i for i, x in enumerate(fcst_len_cycl) if x == long_fcst_len] long_cycles = [i * incr_cycl_freq for i in long_indices] # add one forecast entry per cycle per day @@ -772,6 +836,14 @@ def _get_location(xcs, fmt, expt_cfg): LBC_SPEC_INTVL_HRS = {lbc_spec_intvl_hrs}""" ) + # if inline post is selected, add a upp block to task_run_fcst + if fcst_config["fv3"]["model_configure"]["update_values"]["write_dopost"]: + upp_config = copy.deepcopy(expt_config["task_run_post"]["upp"]) + upp_namelist_config = upp_config["namelist"]["update_values"] + for key in ("datestr", "filename", "filenameflux", "grib", "ioform"): + upp_namelist_config["model_inputs"].pop(key) + upp_config["rundir"] = fcst_config["fv3"]["rundir"] + fcst_config["upp"] = upp_config # # ----------------------------------------------------------------------- # @@ -836,116 +908,84 @@ def _get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - # Set magnitude of stochastic ad-hoc schemes to -999.0 if they are not - # being used. This is required at the moment, since "do_shum/sppt/skeb" - # does not override the use of the scheme unless the magnitude is also - # specifically set to -999.0. If all "do_shum/sppt/skeb" are set to - # "false," then none will run, regardless of the magnitude values. + # Update the FV3 configuration, if needed # # ----------------------------------------------------------------------- - # + + use_merra_climo = ccpp_suite in ( + "FV3_GFS_v15_thompson_mynn_lam3km", + "FV3_GFS_v17_p8", + ) + workflow_config["USE_MERRA_CLIMO"] = use_merra_climo + # Add more fix files if MERRA2 files are needed + if use_merra_climo: + aero_files = {} + fix_clim = Path(workflow_config["FIXclim"]) + fix_files = glob.glob("merra2.aerclim*.nc", + root_dir=Path(platform_config["FIXaer"])) + for file_path in fix_files: + fp = Path(file_path) + fn_month = fp.stem.split(".")[-1] + aero_files[f"aeroclim.{fn_month}.nc"] = str(fix_clim / fp.name) + + fix_files = glob.glob("optics*.dat", + root_dir=Path(platform_config["FIXlut"])) + for file_path in fix_files: + fp = Path(file_path) + linkname = ".".join(fp.name.split(".")[0::2]) + aero_files[linkname] = str(fix_clim / fp.name) + expt_config.update_from({"task_run_fcst": {"fv3": {"files_to_link": aero_files}}}) + + + # Check to make sure all SPP and LSM_SPP lists are the same length. global_sect = expt_config["global"] - if not global_sect.get("DO_SHUM"): - global_sect["SHUM_MAG"] = -999.0 - if not global_sect.get("DO_SKEB"): - global_sect["SKEB_MAG"] = -999.0 - if not global_sect.get("DO_SPPT"): - global_sect["SPPT_MAG"] = -999.0 - # - # ----------------------------------------------------------------------- - # - # If running with SPP in MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or - # RRTMG, count the number of entries in SPP_VAR_LIST to correctly set - # N_VAR_SPP, otherwise set it to zero. - # - # ----------------------------------------------------------------------- - # if global_sect.get("DO_SPP"): - global_sect["N_VAR_SPP"] = len(global_sect["SPP_VAR_LIST"]) - else: - global_sect["N_VAR_SPP"] = 0 - # - # ----------------------------------------------------------------------- - # - # If running with SPP, confirm that each SPP-related namelist value - # contains the same number of entries as N_VAR_SPP (set above to be equal - # to the number of entries in SPP_VAR_LIST). 
- # - # ----------------------------------------------------------------------- - # - spp_vars = [ - "SPP_MAG_LIST", - "SPP_LSCALE", - "SPP_TSCALE", - "SPP_SIGTOP1", - "SPP_SIGTOP2", - "SPP_STDDEV_CUTOFF", - "ISEED_SPP", - ] + stoch_config = fcst_config["fv3"]["namelist"]["update_values"]["nam_sppperts"] + list_vars = ( + "iseed_spp", + "spp_lscale", + "spp_prt_list", + "spp_sigtop1", + "spp_sigtop2", + "spp_stddev_cutoff", + "spp_tau", + "spp_var_list", + ) + list_len = fcst_config["fv3"]["namelist"]["update_values"]["n_var_spp"] + if any([len(stoch_config[v]) != list_len for v in list_vars]): + report = "\n".join([f"{v}: {len(stoch_config[v])}" for v in list_vars]) + raise Exception( + f""" + All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist + variables must be of length equal to namelist setting + "n_var_spp". + n_var_spp: {list_len} + + Relevant namelist settings have counts: + + {report} + """ + ) - if global_sect.get("DO_SPP"): - for spp_var in spp_vars: - if len(global_sect[spp_var]) != global_sect["N_VAR_SPP"]: - raise Exception( - f""" - All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist - variables must be of equal length to SPP_VAR_LIST: - SPP_VAR_LIST (length {global_sect['N_VAR_SPP']}) - {spp_var} (length {len(global_sect[spp_var])}) - """ - ) - # - # ----------------------------------------------------------------------- - # - # If running with Noah or RUC-LSM SPP, count the number of entries in - # LSM_SPP_VAR_LIST to correctly set N_VAR_LNDP, otherwise set it to zero. - # Also set LNDP_TYPE to 2 for LSM SPP, otherwise set it to zero. Finally, - # initialize an "FHCYC_LSM_SPP" variable to 0 and set it to 999 if LSM SPP - # is turned on. This requirement is necessary since LSM SPP cannot run with - # FHCYC=0 at the moment, but FHCYC cannot be set to anything less than the - # length of the forecast either. A bug fix will be submitted to - # ufs-weather-model soon, at which point, this requirement can be removed - # from regional_workflow. - # - # ----------------------------------------------------------------------- - # - if global_sect.get("DO_LSM_SPP"): - global_sect["N_VAR_LNDP"] = len(global_sect["LSM_SPP_VAR_LIST"]) - global_sect["LNDP_TYPE"] = 2 - global_sect["LNDP_MODEL_TYPE"] = 2 - global_sect["FHCYC_LSM_SPP_OR_NOT"] = 999 - else: - global_sect["N_VAR_LNDP"] = 0 - global_sect["LNDP_TYPE"] = 0 - global_sect["LNDP_MODEL_TYPE"] = 0 - global_sect["FHCYC_LSM_SPP_OR_NOT"] = 0 - # - # ----------------------------------------------------------------------- - # - # If running with LSM SPP, confirm that each LSM SPP-related namelist - # value contains the same number of entries as N_VAR_LNDP (set above to - # be equal to the number of entries in LSM_SPP_VAR_LIST). 
- # - # ----------------------------------------------------------------------- - # - lsm_spp_vars = [ - "LSM_SPP_MAG_LIST", - "LSM_SPP_LSCALE", - "LSM_SPP_TSCALE", - ] if global_sect.get("DO_LSM_SPP"): - for lsm_spp_var in lsm_spp_vars: - if len(global_sect[lsm_spp_var]) != global_sect["N_VAR_LNDP"]: - raise Exception( - f""" - All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist - variables must be of equal length to SPP_VAR_LIST: - All Noah or RUC-LSM SPP-related namelist variables (except ISEED_LSM_SPP) - must be equal of equal length to LSM_SPP_VAR_LIST: - LSM_SPP_VAR_LIST (length {global_sect['N_VAR_LNDP']}) - {lsm_spp_var} (length {len(global_sect[lsm_spp_var])} - """ - ) + stoch_config = fcst_config["fv3"]["namelist"]["update_values"]["nam_sfcperts"] + list_vars = ("lndp_tau", "lndp_lscale", "lndp_var_list", "lndp_prt_list") + list_len = fcst_config["fv3"]["namelist"]["update_values"]["n_var_lndp"] + if any([len(stoch_config[v]) != list_len for v in list_vars]): + report = "\n".join([f"{v}: {len(stoch_config[v])}" for v in list_vars]) + raise Exception( + f""" + All Noah or RUC-LSM SPP-related namelist variables (except ISEED_LSM_SPP) + must be of length equal to the n_var_lndp namelist + setting: + n_var_lndp: {list_len} + + Relevant namelist settings have counts: + + {report} + """ + ) # Check whether the forecast length (FCST_LEN_HRS) is evenly divisible # by the BC update interval (LBC_SPEC_INTVL_HRS). If so, generate an @@ -971,7 +1011,6 @@ def _get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - # If using external CRTM fix files to allow post-processing of synthetic # satellite products from the UPP, make sure the CRTM fix file directory exists. if global_sect.get("USE_CRTM"): @@ -1026,13 +1065,14 @@ def _get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # # Use env variables for NCO variables and create NCO directories - workflow_manager = expt_config["platform"].get("WORKFLOW_MANAGER") + workflow_manager = platform_config.get("WORKFLOW_MANAGER") if run_envir == "nco" and workflow_manager == "rocoto": # Update the rocoto string for the fcst output location if # running an ensemble in nco mode if global_sect["DO_ENSEMBLE"]: - rocoto_config["entities"]["FCST_DIR"] = \ - "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" + rocoto_config["entities"][ + "FCST_DIR" + ] = "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') @@ -1103,13 +1143,14 @@ def _get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # # Get list of all top-level tasks and metatasks in the workflow. - task_defs = rocoto_config.get('tasks') + task_defs = rocoto_config.get("tasks") all_tasks = [task for task in task_defs] # Get list of all valid top-level tasks and metatasks pertaining to ensemble # verification.
ens_vx_task_defns = load_config_file( - os.path.join(USHdir, os.pardir, "parm", "wflow", "verify_ens.yaml")) + os.path.join(USHdir, os.pardir, "parm", "wflow", "verify_ens.yaml") + ) ens_vx_valid_tasks = [task for task in ens_vx_task_defns] # Get list of all valid top-level tasks and metatasks in the workflow that @@ -1122,14 +1163,15 @@ def _get_location(xcs, fmt, expt_cfg): do_ensemble = global_sect["DO_ENSEMBLE"] if (not do_ensemble) and ens_vx_tasks: task_str = " " + "\n ".join(ens_vx_tasks) - msg = dedent(f""" + msg = dedent( + f""" Ensemble verification can not be run unless running in ensemble mode: DO_ENSEMBLE = \"{do_ensemble}\" Ensemble verification tasks: - """) - msg = "".join([msg, task_str, dedent(f""" + {task_str} Please set DO_ENSEMBLE to True or remove ensemble vx tasks from the - workflow.""")]) + workflow.""" + ) raise Exception(msg) # @@ -1173,12 +1215,10 @@ def _dict_find(user_dict, substring): # Flags for creating symlinks to pre-generated grid, orography, and sfc_climo files. # These consider dependencies of other tasks on each pre-processing task. create_symlinks_to_pregen_files = { - "GRID": (not run_make_grid) and \ - (run_make_orog or run_make_sfc_climo or run_any_coldstart_task), - "OROG": (not run_make_orog) and \ - (run_make_sfc_climo or run_any_coldstart_task), - "SFC_CLIMO": (not run_make_sfc_climo) and \ - (run_make_ics or run_make_lbcs), + "GRID": (not run_make_grid) + and (run_make_orog or run_make_sfc_climo or run_any_coldstart_task), + "OROG": (not run_make_orog) and (run_make_sfc_climo or run_any_coldstart_task), + "SFC_CLIMO": (not run_make_sfc_climo) and (run_make_ics or run_make_lbcs), } fixed_files = expt_config["fixed_files"] @@ -1221,7 +1261,7 @@ def _dict_find(user_dict, substring): file_group=prep_task.lower(), source_dir=task_dir, target_dir=workflow_config["FIXlam"], - ccpp_phys_suite=workflow_config["CCPP_PHYS_SUITE"], + ccpp_phys_suite=ccpp_suite, constants=expt_config["constants"], dot_or_uscore=workflow_config["DOT_OR_USCORE"], nhw=grid_params["NHW"], @@ -1256,6 +1296,13 @@ def _dict_find(user_dict, substring): workflow_config["RES_IN_FIXLAM_FILENAMES"] = res_in_fixlam_filenames if res_in_fixlam_filenames: workflow_config["CRES"] = f"C{res_in_fixlam_filenames}" + elif cres := os.getenv("CRES"): + workflow_config["CRES"] = cres + + + + + # # ----------------------------------------------------------------------- @@ -1265,9 +1312,9 @@ def _dict_find(user_dict, substring): # # ----------------------------------------------------------------------- # - if fcst_config["WRITE_DOPOST"]: + if fcst_config["fv3"]["model_configure"]["update_values"]["write_dopost"]: # Turn off run_post - task_name = 'metatask_run_ens_post' + task_name = "metatask_run_ens_post" removed_task = task_defs.pop(task_name, None) if removed_task: logger.warning( @@ -1292,33 +1339,34 @@ def _dict_find(user_dict, substring): ccpp_suite_xml = load_xml_file(workflow_config["CCPP_PHYS_SUITE_IN_CCPP_FP"]) # Need to track if we are using RUC LSM for the make_ics step - workflow_config["SDF_USES_RUC_LSM"] = has_tag_with_value(ccpp_suite_xml, "scheme", "lsm_ruc") + workflow_config["SDF_USES_RUC_LSM"] = has_tag_with_value( + ccpp_suite_xml, "scheme", "lsm_ruc" + ) # Thompson microphysics needs additional input files and namelist settings - workflow_config["SDF_USES_THOMPSON_MP"] = has_tag_with_value(ccpp_suite_xml, "scheme", "mp_thompson") + workflow_config["SDF_USES_THOMPSON_MP"] = has_tag_with_value( + ccpp_suite_xml, "scheme", "mp_thompson" + ) if 
workflow_config["SDF_USES_THOMPSON_MP"]: - - logging.debug(f'Selected CCPP suite ({workflow_config["CCPP_PHYS_SUITE"]}) uses Thompson MP') - logging.debug(f'Setting up links for additional fix files') + logging.debug( + f'Selected CCPP suite ({ccpp_suite}) uses Thompson MP' + ) + logging.debug("Setting up links for additional fix files") # If the model ICs or BCs are not from RAP or HRRR, they will not contain aerosol # climatology data needed by the Thompson scheme, so we need to provide a separate file - if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RRFS", "RAP"] or - get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RRFS", "RAP"]): - fixed_files["THOMPSON_FIX_FILES"].append(workflow_config["THOMPSON_MP_CLIMO_FN"]) - - # Add thompson-specific fix files to CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING and - # FIXgsm_FILES_TO_COPY_TO_FIXam; see parm/fixed_files_mapping.yaml for more info on these variables - - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend(fixed_files["THOMPSON_FIX_FILES"]) - - for fix_file in fixed_files["THOMPSON_FIX_FILES"]: - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append(f"{fix_file} | {fix_file}") - - logging.debug(f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}') - logging.debug(f'New fix file mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}') + # Add thompson-specific fix files to the FV3 configuration + thompson_files = fixed_files["THOMPSON_FIX_FILES"] + if get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RRFS", "RAP"] or get_extrn_lbcs[ + "EXTRN_MDL_NAME_LBCS" + ] not in ["HRRR", "RRFS", "RAP"]: + thompson_files.append(workflow_config["THOMPSON_MP_CLIMO_FN"]) + # Link the Thompson fix files from FIXam via the FV3 files_to_link mechanism + fixam = workflow_config["FIXam"] + thompson_fix_links = {fn: f"{fixam}/{fn}" for fn in thompson_files} + fcst_config["fv3"]["files_to_link"].update(thompson_fix_links) # # ----------------------------------------------------------------------- @@ -1339,6 +1387,12 @@ def _dict_find(user_dict, substring): all_lines = cfg_to_yaml_str(expt_config) log_info(all_lines, verbose=debug) + + # Write out a base namelist for the physics suite + physics_base_nml = get_nml_config(expt_config.pop("fv3_namelist_settings")) + base_nml_path = Path(fcst_config["fv3"]["namelist"]["base_file"]) + physics_base_nml.dump(base_nml_path) + global_var_defns_fp = workflow_config["GLOBAL_VAR_DEFNS_FP"] # print info message log_info( @@ -1380,9 +1434,9 @@ def _dict_find(user_dict, substring): logging.error(f"Experiment configuration is not valid against schema") sys.exit(1) - return expt_config + + def clean_rocoto_dict(rocotodict): """Removes any invalid entries from ``rocotodict``.
Examples of invalid entries are: @@ -1400,7 +1454,9 @@ def clean_rocoto_dict(rocotodict): elif key.split("_", maxsplit=1)[0] in ["task"]: if not rocotodict[key].get("command"): popped = rocotodict.pop(key) - logging.warning(f"Invalid task {key} removed due to empty/unset run command") + logging.warning( + f"Invalid task {key} removed due to empty/unset run command" + ) logging.debug(f"Removed entry:\n{popped}") # Loop 2: search for metatasks with no tasks in them @@ -1410,7 +1466,7 @@ def clean_rocoto_dict(rocotodict): for key2 in list(rocotodict[key].keys()): if key2.split("_", maxsplit=1)[0] == "metatask": clean_rocoto_dict(rocotodict[key][key2]) - #After above recursion, any nested empty metatasks will have popped themselves + # After above recursion, any nested empty metatasks will have popped themselves if rocotodict[key].get(key2): valid = True elif key2.split("_", maxsplit=1)[0] == "task": @@ -1421,7 +1477,6 @@ def clean_rocoto_dict(rocotodict): logging.debug(f"Removed entry:\n{popped}") - # # ----------------------------------------------------------------------- # diff --git a/ush/stochastic_params.yaml b/ush/stochastic_params.yaml new file mode 100644 index 0000000000..5f64036118 --- /dev/null +++ b/ush/stochastic_params.yaml @@ -0,0 +1,112 @@ +# This file is loaded in setup.py to create an appropriate namelist when flags are turned on in the +# global section. +# +# For detailed documentation of these parameters, see: +# https://stochastic-physics.readthedocs.io/en/ufs_public_release/namelist_options.html +# +do_shum: + fv3: + namelist: + update_values: + gfs_physics_nml: + do_shum: true + nam_stochy: + iseed_shum: !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 2 }}' + new_lscale: true + shum: 0.006 + shum_lscale: 150000 + shum_tau: 21600 + shumint: 3600 +do_skeb: + fv3: + namelist: + update_values: + gfs_physics_nml: + do_skeb: true + nam_stochy: + iseed_skeb: !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 3 }}' + new_lscale: true + skeb: 0.5 + skeb_lscale: 150000 + skebnorm: 1 + skeb_tau: 21600 + skebint: 3600 + skeb_vdof: 10 + + +# LSM perturbations include SMC - soil moisture content (volume fraction), +# VGF - vegetation fraction, ALB - albedo, SAL - salinity, +# EMI - emissivity, ZOL - surface roughness (cm), and STC - soil temperature +# +# Only five perturbations at a time can be applied currently, but all seven +# are shown below. In addition, only one unique iseed value is allowed. +# at the moment, and is used for each pattern. +do_lsm_spp: + fv3: + namelist: + update_values: + gfs_physics_nml: + do_spp: true + n_var_lndp: 7 + lndp_type: 2 + fhcyc: 999 + nam_stochy: + new_lscale: true + nam_sfcperts: + iseed_lndp: + - !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 9 }}' + lndp_type: 2 + lndp_model_type: 2 + lndp_tau: [ 21600, 21600, 21600, 21600, 21600, 21600, 21600 ] + lndp_lscale: [ 150000, 150000, 150000, 150000, 150000, 150000, 150000 ] + lndp_var_list: [ "smc", "vgf", "alb", "sal", "emi", "zol", "stc" ] + lndp_prt_list: [ 0.017, 0.001, 0.001, 0.001, 0.001, 0.001, 0.2 ] + +# SPP perturbs specific tuning parameters within a physics parameterization (unlike SPPT, which +# multiplies overall physics tendencies by a random perturbation field *after* the call to the +# physics suite). Patterns evolve and are applied at each time step. Each SPP option is an array, +# applicable (in order) to the HRRR-based parameterization listed in spp_var_list. 
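+# For illustration (the numbers simply restate the do_spp defaults below): with +# spp_var_list = [ "pbl", "sfc", "mp", "rad", "gwd" ], the third entry of each list applies to the +# microphysics ("mp") pattern, e.g. spp_prt_list[2] = 0.75 is the perturbation magnitude used there.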
+# +# Note that SPP is currently only available for specific physics schemes used in the RAP/HRRR +# physics suite. Users need to be aware of which suite definition file is chosen when turning this +# option on. + +do_spp: + fv3: + namelist: + update_values: + gfs_physics_nml: + do_spp: true + n_var_spp: 5 + nam_stochy: + new_lscale: true + nam_sppperts: + iseed_spp: + - !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 4 }}' + - !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 5 }}' + - !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 6 }}' + - !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 7 }}' + - !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 8 }}' + spp_lscale: [ 150000.0, 150000.0, 150000.0, 150000.0, 150000.0 ] + spp_prt_list: [ 0.2, 0.2, 0.75, 0.2, 0.2 ] + spp_sigtop1: [ 0.1, 0.1, 0.1, 0.1, 0.1 ] + spp_sigtop2: [ 0.025, 0.025, 0.025, 0.025, 0.025 ] + spp_stddev_cutoff: [ 1.5, 1.5, 2.5, 1.5, 1.5 ] + spp_tau: [ 21600.0, 21600.0, 21600.0, 21600.0, 21600.0 ] + spp_var_list: [ "pbl", "sfc", "mp", "rad", "gwd" ] +do_sppt: + fv3: + namelist: + update_values: + gfs_physics_nml: + do_sppt: true + nam_stochy: + iseed_sppt: !int '{{ cdate.strftime("%Y%m%d%H")|int * 1000 * "MEM"|env|int * 10 + 1 }}' + new_lscale: true + sppt: 0.7 + sppt_logit: true + sppt_lscale: 150000 + sppt_sfclimit: true + sppt_tau: 21600 + spptint: 3600 + use_zmtnblck: false diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py deleted file mode 100644 index a40a878957..0000000000 --- a/ush/update_input_nml.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env python3 - -""" -Updates the model namelist for a variety of different settings. -""" - -import argparse -import os -import sys -from textwrap import dedent - -from python_utils import ( - print_input_args, - print_info_msg, - cfg_to_yaml_str, -) - -from uwtools.api.config import get_nml_config, realize - -VERBOSE = os.environ.get("VERBOSE", "true") - -def update_input_nml(namelist, restart, aqm_na_13km): - """ - Updates the FV3 ``input.nml`` file in the specified run directory - - Args: - namelist (str) : Path to the namelist - restart (bool): Whether the forecast should start from restart? - aqm_na_13km (bool): Whether the 13km AQM configuration should be used? 
- - Returns: - None: Updates ``input.nml`` with the settings provided - """ - - print_input_args(locals()) - settings = {} - - # For restart run - if restart: - settings["fv_core_nml"] = { - "external_ic": False, - "make_nh": False, - "mountain": True, - "na_init": 0, - "nggps_ic": False, - "warm_start": True, - } - - settings["gfs_physics_nml"] = { - "nstf_name": [2, 0, 0, 0, 0], - } - - # For AQM_NA_13km domain for air quality modeling - if aqm_na_13km: - settings["fv_core_nml"] = { - "k_split": 1, - "n_split": 8, - } - - - print_info_msg( - dedent( - f""" - Updating {namelist} - - The updated values are: - - {cfg_to_yaml_str(settings)} - - """ - ), - verbose=VERBOSE, - ) - - # Update the experiment's FV3 INPUT.NML file - realize( - input_config=namelist, - input_format="nml", - output_file=namelist, - output_format="nml", - update_config=get_nml_config(settings), - ) - -def _parse_args(argv): - """Parse command line arguments""" - parser = argparse.ArgumentParser(description="Update FV3 input.nml file for restart.") - - parser.add_argument( - "-n", "--namelist", - dest="namelist", - required=True, - help="Path to namelist to update.", - ) - - parser.add_argument( - "--restart", - action='store_true', - help='Update for restart', - ) - - parser.add_argument( - "--aqm_na_13km", - action='store_true', - help='Update for AQM_NA_13km in air quality modeling', - ) - - return parser.parse_args(argv) - - -if __name__ == "__main__": - args = _parse_args(sys.argv[1:]) - update_input_nml( - namelist=args.namelist, - restart=args.restart, - aqm_na_13km=args.aqm_na_13km, - )
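A minimal sketch of how a block from the new ush/stochastic_params.yaml might be folded into the experiment configuration when the corresponding flag is enabled in the "global" section (its header states it is loaded in setup.py for that purpose). This is illustrative only, not part of the patch: the experiment-config path is hypothetical, the merge call reuses the get_yaml_config/update_from pattern seen elsewhere in this diff, and the Jinja-templated iseed expressions would still need the MEM environment variable and the cycle date before the namelist is realized.

from uwtools.api.config import get_yaml_config

# Sketch only: fold each enabled stochastic scheme's settings into task_run_fcst.
stoch_params = get_yaml_config("ush/stochastic_params.yaml")
expt_config = get_yaml_config("config.yaml")  # hypothetical experiment config

for flag, block in (
    ("DO_SHUM", "do_shum"),
    ("DO_SKEB", "do_skeb"),
    ("DO_SPPT", "do_sppt"),
    ("DO_SPP", "do_spp"),
    ("DO_LSM_SPP", "do_lsm_spp"),
):
    if expt_config["global"].get(flag):
        # Each block carries fv3 -> namelist -> update_values settings for the forecast task.
        expt_config.update_from({"task_run_fcst": stoch_params[block]})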