From e59c89bf3eca76cf5cf5bb04e69630e735bab60b Mon Sep 17 00:00:00 2001 From: "Judy.K.Henderson" Date: Fri, 29 Mar 2024 16:51:05 +0000 Subject: [PATCH] Initial commit of Li's branch with Judy's changes for running under Rocky --- .gitmodules | 2 +- FV3GFSwfm/jkh/config.aero | 46 + FV3GFSwfm/jkh/config.aeroanl | 31 + FV3GFSwfm/jkh/config.aeroanlfinal | 10 + FV3GFSwfm/jkh/config.aeroanlinit | 10 + FV3GFSwfm/jkh/config.aeroanlrun | 11 + FV3GFSwfm/jkh/config.aerosol_init | 10 + FV3GFSwfm/jkh/config.anal | 153 +++ FV3GFSwfm/jkh/config.analcalc | 15 + FV3GFSwfm/jkh/config.analdiag | 11 + FV3GFSwfm/jkh/config.arch | 15 + FV3GFSwfm/jkh/config.atmanl | 33 + FV3GFSwfm/jkh/config.atmanlfinal | 10 + FV3GFSwfm/jkh/config.atmanlinit | 11 + FV3GFSwfm/jkh/config.atmanlrun | 11 + FV3GFSwfm/jkh/config.atmensanl | 23 + FV3GFSwfm/jkh/config.atmensanlfinal | 10 + FV3GFSwfm/jkh/config.atmensanlinit | 11 + FV3GFSwfm/jkh/config.atmensanlrun | 11 + FV3GFSwfm/jkh/config.atmos_products | 40 + FV3GFSwfm/jkh/config.awips | 14 + FV3GFSwfm/jkh/config.base | 397 ++++++ FV3GFSwfm/jkh/config.catchem | 53 + FV3GFSwfm/jkh/config.cleanup | 25 + FV3GFSwfm/jkh/config.com | 97 ++ FV3GFSwfm/jkh/config.earc | 17 + FV3GFSwfm/jkh/config.ecen | 21 + FV3GFSwfm/jkh/config.echgres | 11 + FV3GFSwfm/jkh/config.ediag | 11 + FV3GFSwfm/jkh/config.efcs | 102 ++ FV3GFSwfm/jkh/config.eobs | 31 + FV3GFSwfm/jkh/config.epos | 20 + FV3GFSwfm/jkh/config.esfc | 30 + FV3GFSwfm/jkh/config.eupd | 34 + FV3GFSwfm/jkh/config.fcst | 295 ++++ FV3GFSwfm/jkh/config.fit2obs | 23 + FV3GFSwfm/jkh/config.gempak | 11 + FV3GFSwfm/jkh/config.genesis | 12 + FV3GFSwfm/jkh/config.genesis_fsu | 12 + FV3GFSwfm/jkh/config.ice | 14 + FV3GFSwfm/jkh/config.metp | 100 ++ FV3GFSwfm/jkh/config.mos | 9 + FV3GFSwfm/jkh/config.mos_ext_grd_fcst | 12 + FV3GFSwfm/jkh/config.mos_ext_grd_prdgen | 12 + FV3GFSwfm/jkh/config.mos_ext_grd_prep | 12 + FV3GFSwfm/jkh/config.mos_ext_stn_fcst | 12 + FV3GFSwfm/jkh/config.mos_ext_stn_prdgen | 12 + 
FV3GFSwfm/jkh/config.mos_ext_stn_prep | 12 + FV3GFSwfm/jkh/config.mos_grd_fcst | 12 + FV3GFSwfm/jkh/config.mos_grd_prdgen | 12 + FV3GFSwfm/jkh/config.mos_grd_prep | 12 + FV3GFSwfm/jkh/config.mos_stn_fcst | 12 + FV3GFSwfm/jkh/config.mos_stn_prdgen | 12 + FV3GFSwfm/jkh/config.mos_stn_prep | 12 + FV3GFSwfm/jkh/config.mos_wx_ext_prdgen | 12 + FV3GFSwfm/jkh/config.mos_wx_prdgen | 12 + FV3GFSwfm/jkh/config.npoess | 11 + FV3GFSwfm/jkh/config.nsst | 39 + FV3GFSwfm/jkh/config.oceanice_products | 15 + FV3GFSwfm/jkh/config.ocn | 29 + FV3GFSwfm/jkh/config.ocnanal | 26 + FV3GFSwfm/jkh/config.ocnanalbmat | 11 + FV3GFSwfm/jkh/config.ocnanalchkpt | 11 + FV3GFSwfm/jkh/config.ocnanalecen | 11 + FV3GFSwfm/jkh/config.ocnanalpost | 10 + FV3GFSwfm/jkh/config.ocnanalprep | 10 + FV3GFSwfm/jkh/config.ocnanalrun | 11 + FV3GFSwfm/jkh/config.ocnanalvrfy | 10 + FV3GFSwfm/jkh/config.postsnd | 14 + FV3GFSwfm/jkh/config.prep | 61 + FV3GFSwfm/jkh/config.prepatmiodaobs | 11 + FV3GFSwfm/jkh/config.prepchem | 18 + FV3GFSwfm/jkh/config.prepoceanobs | 20 + FV3GFSwfm/jkh/config.prepsnowobs | 21 + FV3GFSwfm/jkh/config.resources | 1213 +++++++++++++++++ FV3GFSwfm/jkh/config.sfcanl | 16 + FV3GFSwfm/jkh/config.snowanl | 30 + FV3GFSwfm/jkh/config.stage_ic | 41 + FV3GFSwfm/jkh/config.tracker | 12 + FV3GFSwfm/jkh/config.tropcy | 15 + FV3GFSwfm/jkh/config.ufs | 512 +++++++ FV3GFSwfm/jkh/config.upp | 16 + FV3GFSwfm/jkh/config.verfozn | 22 + FV3GFSwfm/jkh/config.verfrad | 26 + FV3GFSwfm/jkh/config.vminmon | 15 + FV3GFSwfm/jkh/config.wave | 207 +++ FV3GFSwfm/jkh/config.waveawipsbulls | 13 + FV3GFSwfm/jkh/config.waveawipsgridded | 13 + FV3GFSwfm/jkh/config.wavegempak | 12 + FV3GFSwfm/jkh/config.waveinit | 14 + FV3GFSwfm/jkh/config.wavepostbndpnt | 11 + FV3GFSwfm/jkh/config.wavepostbndpntbll | 11 + FV3GFSwfm/jkh/config.wavepostpnt | 11 + FV3GFSwfm/jkh/config.wavepostsbs | 28 + FV3GFSwfm/jkh/config.waveprep | 27 + FV3GFSwfm/jkh/jkh.crontab | 5 + FV3GFSwfm/jkh/jkh.xml | 326 +++++ 
FV3GFSwfm/test_catchem/CAT_C96_CCPP.xml | 638 +++++++++ FV3GFSwfm/test_catchem/config.aero | 46 + FV3GFSwfm/test_catchem/config.aeroanl | 31 + FV3GFSwfm/test_catchem/config.aeroanlfinal | 10 + FV3GFSwfm/test_catchem/config.aeroanlinit | 10 + FV3GFSwfm/test_catchem/config.aeroanlrun | 11 + FV3GFSwfm/test_catchem/config.aerosol_init | 10 + FV3GFSwfm/test_catchem/config.anal | 153 +++ FV3GFSwfm/test_catchem/config.analcalc | 15 + FV3GFSwfm/test_catchem/config.analdiag | 11 + FV3GFSwfm/test_catchem/config.arch | 15 + FV3GFSwfm/test_catchem/config.atmanl | 33 + FV3GFSwfm/test_catchem/config.atmanlfinal | 10 + FV3GFSwfm/test_catchem/config.atmanlinit | 11 + FV3GFSwfm/test_catchem/config.atmanlrun | 11 + FV3GFSwfm/test_catchem/config.atmensanl | 23 + FV3GFSwfm/test_catchem/config.atmensanlfinal | 10 + FV3GFSwfm/test_catchem/config.atmensanlinit | 11 + FV3GFSwfm/test_catchem/config.atmensanlrun | 11 + FV3GFSwfm/test_catchem/config.atmos_products | 40 + FV3GFSwfm/test_catchem/config.awips | 14 + FV3GFSwfm/test_catchem/config.base | 396 ++++++ FV3GFSwfm/test_catchem/config.catchem | 53 + FV3GFSwfm/test_catchem/config.cleanup | 25 + FV3GFSwfm/test_catchem/config.com | 97 ++ FV3GFSwfm/test_catchem/config.earc | 17 + FV3GFSwfm/test_catchem/config.ecen | 21 + FV3GFSwfm/test_catchem/config.echgres | 11 + FV3GFSwfm/test_catchem/config.ediag | 11 + FV3GFSwfm/test_catchem/config.efcs | 102 ++ FV3GFSwfm/test_catchem/config.eobs | 31 + FV3GFSwfm/test_catchem/config.epos | 20 + FV3GFSwfm/test_catchem/config.esfc | 30 + FV3GFSwfm/test_catchem/config.eupd | 34 + FV3GFSwfm/test_catchem/config.fcst | 295 ++++ FV3GFSwfm/test_catchem/config.fit2obs | 23 + FV3GFSwfm/test_catchem/config.gempak | 11 + FV3GFSwfm/test_catchem/config.genesis | 12 + FV3GFSwfm/test_catchem/config.genesis_fsu | 12 + FV3GFSwfm/test_catchem/config.ice | 14 + FV3GFSwfm/test_catchem/config.metp | 100 ++ FV3GFSwfm/test_catchem/config.mos | 9 + .../test_catchem/config.mos_ext_grd_fcst | 12 + 
.../test_catchem/config.mos_ext_grd_prdgen | 12 + .../test_catchem/config.mos_ext_grd_prep | 12 + .../test_catchem/config.mos_ext_stn_fcst | 12 + .../test_catchem/config.mos_ext_stn_prdgen | 12 + .../test_catchem/config.mos_ext_stn_prep | 12 + FV3GFSwfm/test_catchem/config.mos_grd_fcst | 12 + FV3GFSwfm/test_catchem/config.mos_grd_prdgen | 12 + FV3GFSwfm/test_catchem/config.mos_grd_prep | 12 + FV3GFSwfm/test_catchem/config.mos_stn_fcst | 12 + FV3GFSwfm/test_catchem/config.mos_stn_prdgen | 12 + FV3GFSwfm/test_catchem/config.mos_stn_prep | 12 + .../test_catchem/config.mos_wx_ext_prdgen | 12 + FV3GFSwfm/test_catchem/config.mos_wx_prdgen | 12 + FV3GFSwfm/test_catchem/config.npoess | 11 + FV3GFSwfm/test_catchem/config.nsst | 39 + .../test_catchem/config.oceanice_products | 15 + FV3GFSwfm/test_catchem/config.ocn | 29 + FV3GFSwfm/test_catchem/config.ocnanal | 26 + FV3GFSwfm/test_catchem/config.ocnanalbmat | 11 + FV3GFSwfm/test_catchem/config.ocnanalchkpt | 11 + FV3GFSwfm/test_catchem/config.ocnanalecen | 11 + FV3GFSwfm/test_catchem/config.ocnanalpost | 10 + FV3GFSwfm/test_catchem/config.ocnanalprep | 10 + FV3GFSwfm/test_catchem/config.ocnanalrun | 11 + FV3GFSwfm/test_catchem/config.ocnanalvrfy | 10 + FV3GFSwfm/test_catchem/config.postsnd | 14 + FV3GFSwfm/test_catchem/config.prep | 61 + FV3GFSwfm/test_catchem/config.prepatmiodaobs | 11 + FV3GFSwfm/test_catchem/config.prepchem | 23 + FV3GFSwfm/test_catchem/config.prepoceanobs | 20 + FV3GFSwfm/test_catchem/config.prepsnowobs | 21 + FV3GFSwfm/test_catchem/config.resources | 1213 +++++++++++++++++ FV3GFSwfm/test_catchem/config.sfcanl | 16 + FV3GFSwfm/test_catchem/config.snowanl | 30 + FV3GFSwfm/test_catchem/config.stage_ic | 41 + FV3GFSwfm/test_catchem/config.tracker | 12 + FV3GFSwfm/test_catchem/config.tropcy | 15 + FV3GFSwfm/test_catchem/config.ufs | 512 +++++++ FV3GFSwfm/test_catchem/config.upp | 16 + FV3GFSwfm/test_catchem/config.verfozn | 22 + FV3GFSwfm/test_catchem/config.verfrad | 26 + 
FV3GFSwfm/test_catchem/config.vminmon | 15 + FV3GFSwfm/test_catchem/config.wave | 207 +++ FV3GFSwfm/test_catchem/config.waveawipsbulls | 13 + .../test_catchem/config.waveawipsgridded | 13 + FV3GFSwfm/test_catchem/config.wavegempak | 12 + FV3GFSwfm/test_catchem/config.waveinit | 14 + FV3GFSwfm/test_catchem/config.wavepostbndpnt | 11 + .../test_catchem/config.wavepostbndpntbll | 11 + FV3GFSwfm/test_catchem/config.wavepostpnt | 11 + FV3GFSwfm/test_catchem/config.wavepostsbs | 28 + FV3GFSwfm/test_catchem/config.waveprep | 27 + FV3GFSwfm/test_catchem/jkhINFO | 1 + FV3GFSwfm/test_catchem/logs/2024022100.log | 4 + FV3GFSwfm/test_catchem/logs/2024022200.log | 4 + FV3GFSwfm/test_catchem/logs/2024032100.log | 71 + FV3GFSwfm/test_catchem/logs/2024032800.log | 16 + FV3GFSwfm/test_catchem/noent_test.xml | 345 +++++ FV3GFSwfm/test_catchem/rt_p8-chem.xml | 304 +++++ FV3GFSwfm/test_catchem/runcmds | 4 + FV3GFSwfm/test_catchem/test_catchem.crontab | 5 + FV3GFSwfm/test_catchem/test_catchem.db | Bin 0 -> 28672 bytes FV3GFSwfm/test_catchem/test_catchem.xml | 363 +++++ FV3GFSwfm/test_catchem/test_catchem.xml_gen | 326 +++++ FV3GFSwfm/test_catchem/test_catchem_lock.db | Bin 0 -> 8192 bytes INFO | 26 + env/HERA.env | 7 +- files_chgd | 25 + jobs/rocoto/prepchem.sh | 166 +-- jobs/rocoto/prepchem.sh-li | 280 ++++ parm/config/gfs/config.base | 7 +- parm/config/gfs/config.catchem | 2 +- parm/config/gfs/config.fcst | 6 +- parm/config/gfs/yaml/defaults.yaml | 2 +- parm/post/postxconfig-NT-CCPP-CHEM-F00.txt | 2 +- parm/post/postxconfig-NT-CCPP-CHEM.txt | 2 +- scripts/exglobal_stage_ic.sh | 22 +- scripts/exglobal_stage_ic.sh-li | 91 ++ sorc/build_prepchem_NC.sh | 2 +- versions/build.spack.ver | 3 +- workflow/hosts/hera.yaml | 14 +- workflow/hosts/hera_emc.yaml | 24 + workflow/jkh.sh | 20 + workflow/test_catchem.sh | 20 + 224 files changed, 12196 insertions(+), 109 deletions(-) create mode 100644 FV3GFSwfm/jkh/config.aero create mode 100644 FV3GFSwfm/jkh/config.aeroanl create mode 100644 
FV3GFSwfm/jkh/config.aeroanlfinal create mode 100644 FV3GFSwfm/jkh/config.aeroanlinit create mode 100644 FV3GFSwfm/jkh/config.aeroanlrun create mode 100644 FV3GFSwfm/jkh/config.aerosol_init create mode 100644 FV3GFSwfm/jkh/config.anal create mode 100644 FV3GFSwfm/jkh/config.analcalc create mode 100644 FV3GFSwfm/jkh/config.analdiag create mode 100644 FV3GFSwfm/jkh/config.arch create mode 100644 FV3GFSwfm/jkh/config.atmanl create mode 100644 FV3GFSwfm/jkh/config.atmanlfinal create mode 100644 FV3GFSwfm/jkh/config.atmanlinit create mode 100644 FV3GFSwfm/jkh/config.atmanlrun create mode 100644 FV3GFSwfm/jkh/config.atmensanl create mode 100644 FV3GFSwfm/jkh/config.atmensanlfinal create mode 100644 FV3GFSwfm/jkh/config.atmensanlinit create mode 100644 FV3GFSwfm/jkh/config.atmensanlrun create mode 100644 FV3GFSwfm/jkh/config.atmos_products create mode 100644 FV3GFSwfm/jkh/config.awips create mode 100644 FV3GFSwfm/jkh/config.base create mode 100644 FV3GFSwfm/jkh/config.catchem create mode 100644 FV3GFSwfm/jkh/config.cleanup create mode 100644 FV3GFSwfm/jkh/config.com create mode 100644 FV3GFSwfm/jkh/config.earc create mode 100644 FV3GFSwfm/jkh/config.ecen create mode 100644 FV3GFSwfm/jkh/config.echgres create mode 100644 FV3GFSwfm/jkh/config.ediag create mode 100644 FV3GFSwfm/jkh/config.efcs create mode 100644 FV3GFSwfm/jkh/config.eobs create mode 100644 FV3GFSwfm/jkh/config.epos create mode 100644 FV3GFSwfm/jkh/config.esfc create mode 100644 FV3GFSwfm/jkh/config.eupd create mode 100644 FV3GFSwfm/jkh/config.fcst create mode 100644 FV3GFSwfm/jkh/config.fit2obs create mode 100644 FV3GFSwfm/jkh/config.gempak create mode 100644 FV3GFSwfm/jkh/config.genesis create mode 100644 FV3GFSwfm/jkh/config.genesis_fsu create mode 100644 FV3GFSwfm/jkh/config.ice create mode 100644 FV3GFSwfm/jkh/config.metp create mode 100644 FV3GFSwfm/jkh/config.mos create mode 100644 FV3GFSwfm/jkh/config.mos_ext_grd_fcst create mode 100644 FV3GFSwfm/jkh/config.mos_ext_grd_prdgen create mode 100644 
FV3GFSwfm/jkh/config.mos_ext_grd_prep create mode 100644 FV3GFSwfm/jkh/config.mos_ext_stn_fcst create mode 100644 FV3GFSwfm/jkh/config.mos_ext_stn_prdgen create mode 100644 FV3GFSwfm/jkh/config.mos_ext_stn_prep create mode 100644 FV3GFSwfm/jkh/config.mos_grd_fcst create mode 100644 FV3GFSwfm/jkh/config.mos_grd_prdgen create mode 100644 FV3GFSwfm/jkh/config.mos_grd_prep create mode 100644 FV3GFSwfm/jkh/config.mos_stn_fcst create mode 100644 FV3GFSwfm/jkh/config.mos_stn_prdgen create mode 100644 FV3GFSwfm/jkh/config.mos_stn_prep create mode 100644 FV3GFSwfm/jkh/config.mos_wx_ext_prdgen create mode 100644 FV3GFSwfm/jkh/config.mos_wx_prdgen create mode 100644 FV3GFSwfm/jkh/config.npoess create mode 100644 FV3GFSwfm/jkh/config.nsst create mode 100644 FV3GFSwfm/jkh/config.oceanice_products create mode 100644 FV3GFSwfm/jkh/config.ocn create mode 100644 FV3GFSwfm/jkh/config.ocnanal create mode 100644 FV3GFSwfm/jkh/config.ocnanalbmat create mode 100644 FV3GFSwfm/jkh/config.ocnanalchkpt create mode 100644 FV3GFSwfm/jkh/config.ocnanalecen create mode 100644 FV3GFSwfm/jkh/config.ocnanalpost create mode 100644 FV3GFSwfm/jkh/config.ocnanalprep create mode 100644 FV3GFSwfm/jkh/config.ocnanalrun create mode 100644 FV3GFSwfm/jkh/config.ocnanalvrfy create mode 100644 FV3GFSwfm/jkh/config.postsnd create mode 100644 FV3GFSwfm/jkh/config.prep create mode 100644 FV3GFSwfm/jkh/config.prepatmiodaobs create mode 100755 FV3GFSwfm/jkh/config.prepchem create mode 100644 FV3GFSwfm/jkh/config.prepoceanobs create mode 100644 FV3GFSwfm/jkh/config.prepsnowobs create mode 100644 FV3GFSwfm/jkh/config.resources create mode 100644 FV3GFSwfm/jkh/config.sfcanl create mode 100644 FV3GFSwfm/jkh/config.snowanl create mode 100644 FV3GFSwfm/jkh/config.stage_ic create mode 100644 FV3GFSwfm/jkh/config.tracker create mode 100644 FV3GFSwfm/jkh/config.tropcy create mode 100644 FV3GFSwfm/jkh/config.ufs create mode 100644 FV3GFSwfm/jkh/config.upp create mode 100644 FV3GFSwfm/jkh/config.verfozn create mode 100644 
FV3GFSwfm/jkh/config.verfrad create mode 100644 FV3GFSwfm/jkh/config.vminmon create mode 100644 FV3GFSwfm/jkh/config.wave create mode 100644 FV3GFSwfm/jkh/config.waveawipsbulls create mode 100644 FV3GFSwfm/jkh/config.waveawipsgridded create mode 100644 FV3GFSwfm/jkh/config.wavegempak create mode 100644 FV3GFSwfm/jkh/config.waveinit create mode 100644 FV3GFSwfm/jkh/config.wavepostbndpnt create mode 100644 FV3GFSwfm/jkh/config.wavepostbndpntbll create mode 100644 FV3GFSwfm/jkh/config.wavepostpnt create mode 100644 FV3GFSwfm/jkh/config.wavepostsbs create mode 100644 FV3GFSwfm/jkh/config.waveprep create mode 100644 FV3GFSwfm/jkh/jkh.crontab create mode 100644 FV3GFSwfm/jkh/jkh.xml create mode 100644 FV3GFSwfm/test_catchem/CAT_C96_CCPP.xml create mode 100644 FV3GFSwfm/test_catchem/config.aero create mode 100644 FV3GFSwfm/test_catchem/config.aeroanl create mode 100644 FV3GFSwfm/test_catchem/config.aeroanlfinal create mode 100644 FV3GFSwfm/test_catchem/config.aeroanlinit create mode 100644 FV3GFSwfm/test_catchem/config.aeroanlrun create mode 100644 FV3GFSwfm/test_catchem/config.aerosol_init create mode 100644 FV3GFSwfm/test_catchem/config.anal create mode 100644 FV3GFSwfm/test_catchem/config.analcalc create mode 100644 FV3GFSwfm/test_catchem/config.analdiag create mode 100644 FV3GFSwfm/test_catchem/config.arch create mode 100644 FV3GFSwfm/test_catchem/config.atmanl create mode 100644 FV3GFSwfm/test_catchem/config.atmanlfinal create mode 100644 FV3GFSwfm/test_catchem/config.atmanlinit create mode 100644 FV3GFSwfm/test_catchem/config.atmanlrun create mode 100644 FV3GFSwfm/test_catchem/config.atmensanl create mode 100644 FV3GFSwfm/test_catchem/config.atmensanlfinal create mode 100644 FV3GFSwfm/test_catchem/config.atmensanlinit create mode 100644 FV3GFSwfm/test_catchem/config.atmensanlrun create mode 100644 FV3GFSwfm/test_catchem/config.atmos_products create mode 100644 FV3GFSwfm/test_catchem/config.awips create mode 100644 FV3GFSwfm/test_catchem/config.base create mode 
100644 FV3GFSwfm/test_catchem/config.catchem create mode 100644 FV3GFSwfm/test_catchem/config.cleanup create mode 100644 FV3GFSwfm/test_catchem/config.com create mode 100644 FV3GFSwfm/test_catchem/config.earc create mode 100644 FV3GFSwfm/test_catchem/config.ecen create mode 100644 FV3GFSwfm/test_catchem/config.echgres create mode 100644 FV3GFSwfm/test_catchem/config.ediag create mode 100644 FV3GFSwfm/test_catchem/config.efcs create mode 100644 FV3GFSwfm/test_catchem/config.eobs create mode 100644 FV3GFSwfm/test_catchem/config.epos create mode 100644 FV3GFSwfm/test_catchem/config.esfc create mode 100644 FV3GFSwfm/test_catchem/config.eupd create mode 100644 FV3GFSwfm/test_catchem/config.fcst create mode 100644 FV3GFSwfm/test_catchem/config.fit2obs create mode 100644 FV3GFSwfm/test_catchem/config.gempak create mode 100644 FV3GFSwfm/test_catchem/config.genesis create mode 100644 FV3GFSwfm/test_catchem/config.genesis_fsu create mode 100644 FV3GFSwfm/test_catchem/config.ice create mode 100644 FV3GFSwfm/test_catchem/config.metp create mode 100644 FV3GFSwfm/test_catchem/config.mos create mode 100644 FV3GFSwfm/test_catchem/config.mos_ext_grd_fcst create mode 100644 FV3GFSwfm/test_catchem/config.mos_ext_grd_prdgen create mode 100644 FV3GFSwfm/test_catchem/config.mos_ext_grd_prep create mode 100644 FV3GFSwfm/test_catchem/config.mos_ext_stn_fcst create mode 100644 FV3GFSwfm/test_catchem/config.mos_ext_stn_prdgen create mode 100644 FV3GFSwfm/test_catchem/config.mos_ext_stn_prep create mode 100644 FV3GFSwfm/test_catchem/config.mos_grd_fcst create mode 100644 FV3GFSwfm/test_catchem/config.mos_grd_prdgen create mode 100644 FV3GFSwfm/test_catchem/config.mos_grd_prep create mode 100644 FV3GFSwfm/test_catchem/config.mos_stn_fcst create mode 100644 FV3GFSwfm/test_catchem/config.mos_stn_prdgen create mode 100644 FV3GFSwfm/test_catchem/config.mos_stn_prep create mode 100644 FV3GFSwfm/test_catchem/config.mos_wx_ext_prdgen create mode 100644 FV3GFSwfm/test_catchem/config.mos_wx_prdgen 
create mode 100644 FV3GFSwfm/test_catchem/config.npoess create mode 100644 FV3GFSwfm/test_catchem/config.nsst create mode 100644 FV3GFSwfm/test_catchem/config.oceanice_products create mode 100644 FV3GFSwfm/test_catchem/config.ocn create mode 100644 FV3GFSwfm/test_catchem/config.ocnanal create mode 100644 FV3GFSwfm/test_catchem/config.ocnanalbmat create mode 100644 FV3GFSwfm/test_catchem/config.ocnanalchkpt create mode 100644 FV3GFSwfm/test_catchem/config.ocnanalecen create mode 100644 FV3GFSwfm/test_catchem/config.ocnanalpost create mode 100644 FV3GFSwfm/test_catchem/config.ocnanalprep create mode 100644 FV3GFSwfm/test_catchem/config.ocnanalrun create mode 100644 FV3GFSwfm/test_catchem/config.ocnanalvrfy create mode 100644 FV3GFSwfm/test_catchem/config.postsnd create mode 100644 FV3GFSwfm/test_catchem/config.prep create mode 100644 FV3GFSwfm/test_catchem/config.prepatmiodaobs create mode 100755 FV3GFSwfm/test_catchem/config.prepchem create mode 100644 FV3GFSwfm/test_catchem/config.prepoceanobs create mode 100644 FV3GFSwfm/test_catchem/config.prepsnowobs create mode 100644 FV3GFSwfm/test_catchem/config.resources create mode 100644 FV3GFSwfm/test_catchem/config.sfcanl create mode 100644 FV3GFSwfm/test_catchem/config.snowanl create mode 100644 FV3GFSwfm/test_catchem/config.stage_ic create mode 100644 FV3GFSwfm/test_catchem/config.tracker create mode 100644 FV3GFSwfm/test_catchem/config.tropcy create mode 100644 FV3GFSwfm/test_catchem/config.ufs create mode 100644 FV3GFSwfm/test_catchem/config.upp create mode 100644 FV3GFSwfm/test_catchem/config.verfozn create mode 100644 FV3GFSwfm/test_catchem/config.verfrad create mode 100644 FV3GFSwfm/test_catchem/config.vminmon create mode 100644 FV3GFSwfm/test_catchem/config.wave create mode 100644 FV3GFSwfm/test_catchem/config.waveawipsbulls create mode 100644 FV3GFSwfm/test_catchem/config.waveawipsgridded create mode 100644 FV3GFSwfm/test_catchem/config.wavegempak create mode 100644 FV3GFSwfm/test_catchem/config.waveinit create 
mode 100644 FV3GFSwfm/test_catchem/config.wavepostbndpnt create mode 100644 FV3GFSwfm/test_catchem/config.wavepostbndpntbll create mode 100644 FV3GFSwfm/test_catchem/config.wavepostpnt create mode 100644 FV3GFSwfm/test_catchem/config.wavepostsbs create mode 100644 FV3GFSwfm/test_catchem/config.waveprep create mode 100644 FV3GFSwfm/test_catchem/jkhINFO create mode 100644 FV3GFSwfm/test_catchem/logs/2024022100.log create mode 100644 FV3GFSwfm/test_catchem/logs/2024022200.log create mode 100644 FV3GFSwfm/test_catchem/logs/2024032100.log create mode 100644 FV3GFSwfm/test_catchem/logs/2024032800.log create mode 100644 FV3GFSwfm/test_catchem/noent_test.xml create mode 100644 FV3GFSwfm/test_catchem/rt_p8-chem.xml create mode 100644 FV3GFSwfm/test_catchem/runcmds create mode 100644 FV3GFSwfm/test_catchem/test_catchem.crontab create mode 100644 FV3GFSwfm/test_catchem/test_catchem.db create mode 100644 FV3GFSwfm/test_catchem/test_catchem.xml create mode 100644 FV3GFSwfm/test_catchem/test_catchem.xml_gen create mode 100644 FV3GFSwfm/test_catchem/test_catchem_lock.db create mode 100644 INFO create mode 100644 files_chgd create mode 100755 jobs/rocoto/prepchem.sh-li create mode 100755 scripts/exglobal_stage_ic.sh-li create mode 100644 workflow/hosts/hera_emc.yaml create mode 100755 workflow/jkh.sh create mode 100755 workflow/test_catchem.sh diff --git a/.gitmodules b/.gitmodules index 67587d28f8..e07c86b570 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,7 +1,7 @@ [submodule "sorc/ufs_model.fd"] path = sorc/ufs_model.fd url = https://github.com/zhanglikate/ufs-weather-model - branch = ufs-catchem + branch = thom-cat ignore = dirty [submodule "sorc/wxflow"] path = sorc/wxflow diff --git a/FV3GFSwfm/jkh/config.aero b/FV3GFSwfm/jkh/config.aero new file mode 100644 index 0000000000..c152fafd12 --- /dev/null +++ b/FV3GFSwfm/jkh/config.aero @@ -0,0 +1,46 @@ +#! 
/usr/bin/env bash + +# UFS-Aerosols settings + +# Turn off warnings about unused variables +# shellcheck disable=SC2034 + + +# Path to the input data tree +case ${machine} in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION" | "HERCULES") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine ${machine} unsupported for aerosols" + exit 2 + ;; +esac +export AERO_INPUTS_DIR + +export AERO_DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table.aero" +export AERO_FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table.aero" +# Biomass burning emission dataset. Choose from: gbbepx, qfed, none +export AERO_EMIS_FIRE="qfed" +# Directory containing GOCART configuration files +export AERO_CONFIG_DIR="${PARMgfs}/ufs/gocart" + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +export fscav_aero="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +export dnats_aero=2 diff --git a/FV3GFSwfm/jkh/config.aeroanl b/FV3GFSwfm/jkh/config.aeroanl new file mode 100644 index 0000000000..c1752745ae --- /dev/null +++ b/FV3GFSwfm/jkh/config.aeroanl @@ -0,0 +1,31 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" +export STATICB_TYPE='identity' +export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" +export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" +export BERROR_DATE="20160630.000000" + +export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE="${EXECgfs}/fv3jedi_var.x" + +if [[ "${DOIAU}" == "YES" ]]; then + export aero_bkg_times="3,6,9" + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2" +else + export aero_bkg_times="6" + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2" +fi + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/jkh/config.aeroanlfinal b/FV3GFSwfm/jkh/config.aeroanlfinal new file mode 100644 index 0000000000..34e5d8f116 --- /dev/null +++ b/FV3GFSwfm/jkh/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlfinal 
+echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/jkh/config.aeroanlinit b/FV3GFSwfm/jkh/config.aeroanlinit new file mode 100644 index 0000000000..7036d3d27b --- /dev/null +++ b/FV3GFSwfm/jkh/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/jkh/config.aeroanlrun b/FV3GFSwfm/jkh/config.aeroanlrun new file mode 100644 index 0000000000..012e5b79f3 --- /dev/null +++ b/FV3GFSwfm/jkh/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/jkh/config.aerosol_init b/FV3GFSwfm/jkh/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/jkh/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/jkh/config.anal b/FV3GFSwfm/jkh/config.anal new file mode 100644 index 0000000000..09aaa15a98 --- /dev/null +++ b/FV3GFSwfm/jkh/config.anal @@ -0,0 +1,153 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. 
+ export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgfs}/gsi/global_convinfo.txt +export OZINFO=${FIXgfs}/gsi/global_ozinfo.txt +export SATINFO=${FIXgfs}/gsi/global_satinfo.txt +export OBERROR=${FIXgfs}/gsi/prepobs_errtable.global + +if [[ ${GSI_SOILANAL} = "YES" ]]; then + export hofx_2m_sfcfile=".true." + export reducedgrid=".false." # not possible for sfc analysis, Jeff Whitaker says it's not useful anyway + export paranc=".false." # temporary until sfc io coded for parance (PR being prepared by T. 
Gichamo) + export CONVINFO=${FIXgfs}/gsi/global_convinfo_2mObs.txt + export ANAVINFO=${FIXgfs}/gsi/global_anavinfo_soilanal.l127.txt +fi + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt 
"2021091612" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgfs}/gsi/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. 
Thus, the logic below is not + # needed at this time + # + # Turn off assimilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/jkh/config.analcalc b/FV3GFSwfm/jkh/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/jkh/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/jkh/config.analdiag b/FV3GFSwfm/jkh/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/jkh/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/jkh/config.arch b/FV3GFSwfm/jkh/config.arch new file mode 100644 index 0000000000..a23bcce6ae --- /dev/null +++ b/FV3GFSwfm/jkh/config.arch @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} + +echo "END: config.arch" diff --git a/FV3GFSwfm/jkh/config.atmanl b/FV3GFSwfm/jkh/config.atmanl new file mode 100644 index 0000000000..88d47cbdb2 --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmanl @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/gdas_prototype_3d.yaml.j2" +export JEDIYAML="${PARMgfs}/gdas/atm/variational/3dvar_drpcg.yaml.j2" +export STATICB_TYPE="gsibec" +export INTERP_METHOD='barycentric' + +if [[ ${DOHYBVAR} = "YES" ]]; then + # shellcheck disable=SC2153 + export CASE_ANL=${CASE_ENS} + export BERROR_YAML="${PARMgfs}/gdas/atm/berror/hybvar_${STATICB_TYPE}.yaml.j2" +else + export CASE_ANL=${CASE} + export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2" +fi + +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + +export layout_x_atmanl=8 +export layout_y_atmanl=8 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${EXECgfs}/fv3jedi_var.x + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/jkh/config.atmanlfinal b/FV3GFSwfm/jkh/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/jkh/config.atmanlinit b/FV3GFSwfm/jkh/config.atmanlinit new file mode 100644 index 0000000000..1aec88bcc2 --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmanlinit @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit + +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/jkh/config.atmanlrun b/FV3GFSwfm/jkh/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/jkh/config.atmensanl b/FV3GFSwfm/jkh/config.atmensanl new file mode 100644 index 0000000000..edc5a0f8df --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmensanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2" +export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2" +export INTERP_METHOD='barycentric' + +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + +export layout_x_atmensanl=8 +export layout_y_atmensanl=8 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${EXECgfs}/fv3jedi_letkf.x + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/jkh/config.atmensanlfinal b/FV3GFSwfm/jkh/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/jkh/config.atmensanlinit b/FV3GFSwfm/jkh/config.atmensanlinit new file mode 100644 index 0000000000..0eee2ffa82 --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmensanlinit @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit + +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/jkh/config.atmensanlrun b/FV3GFSwfm/jkh/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/jkh/config.atmos_products b/FV3GFSwfm/jkh/config.atmos_products new file mode 100644 index 0000000000..451f5eff86 --- /dev/null +++ b/FV3GFSwfm/jkh/config.atmos_products @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +########## config.atmos_products ########## +# atmosphere grib2 products specific + +echo "BEGIN: config.atmos_products" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmos_products + +# No. 
of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +# Scripts used by this job +export INTERP_ATMOS_MASTERSH="${USHgfs}/interp_atmos_master.sh" +export INTERP_ATMOS_SFLUXSH="${USHgfs}/interp_atmos_sflux.sh" + +if [[ "${RUN:-}" == "gdas" ]]; then + export downset=1 + export FHOUT_PGBS=${FHOUT:-1} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="NO" # Create interpolated sflux.1p00 file + export WGNE="NO" # WGNE products are created for first FHMAX_WGNE forecast hours + export FHMAX_WGNE=0 +elif [[ "${RUN:-}" == "gfs" ]]; then + export downset=2 + export FHOUT_PGBS=${FHOUT_GFS:-3} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="YES" # Create interpolated sflux.1p00 file + export WGNE="YES" # WGNE products are created for first FHMAX_WGNE forecast hours + export FHMAX_WGNE=180 +fi + +export APCP_MSG="597" # Message number for APCP in GFSv16. Look for TODO in exglobal_atmos_products.sh + +# paramlist files for the different forecast hours and downsets +export paramlista="${PARMgfs}/product/gfs.fFFF.paramlist.a.txt" +export paramlista_anl="${PARMgfs}/product/gfs.anl.paramlist.a.txt" +export paramlista_f000="${PARMgfs}/product/gfs.f000.paramlist.a.txt" +export paramlistb="${PARMgfs}/product/gfs.fFFF.paramlist.b.txt" + +echo "END: config.atmos_products" diff --git a/FV3GFSwfm/jkh/config.awips b/FV3GFSwfm/jkh/config.awips new file mode 100644 index 0000000000..61f0dc5652 --- /dev/null +++ b/FV3GFSwfm/jkh/config.awips @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. "${EXPDIR}/config.resources" awips + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/jkh/config.base b/FV3GFSwfm/jkh/config.base new file mode 100644 index 0000000000..c15d6e1dbb --- /dev/null +++ b/FV3GFSwfm/jkh/config.base @@ -0,0 +1,397 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +export HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march +export EXECgfs="${HOMEgfs}/exec" +export FIXgfs="${HOMEgfs}/fix" +export PARMgfs="${HOMEgfs}/parm" +export SCRgfs="${HOMEgfs}/scripts" +export USHgfs="${HOMEgfs}/ush" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun" # TODO: set via prod_envir in Ops +export COMINsyn="/scratch1/NCEPDEV/global/glopara/com/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" +export BASE_CPLIC="/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${HOMEgfs}/FV3GFSrun" +export PTMP="${HOMEgfs}/FV3GFSrun" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_GOES="NO" # GOES products +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export DO_NPOESS="NO" # NPOESS products +export DO_TRACKER="YES" # Hurricane track verification +export DO_GENESIS="YES" # Cyclone genesis verification +export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2024032000 +export 
EDATE=2024032000 +export EXP_WARM_START=".false." +export assim_freq=24 ## JKH +export PSLOT="jkh" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ICSDIR="@ICSDIR@" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" +#JKHexport ICSDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/Kate.Zhang/fv3gfs/comrot/CAT_C96_CCPP" +#JKHexport ICSORG="/scratch1/BMC/gsd-fv3/exp/UFS-CAMsuite_dev1/FV3GFSrun/c96l64ics_v15" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? 
+ +# Get all the COM path templates +source "${EXPDIR}/config.com" + +# shellcheck disable=SC2016 +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export DO_CATChem="NO" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? + +# Resolution specific parameters +export LEVS=128 +export CASE="C384" +export CASE_ENS="@CASEENS@" +export OCNRES="025" +export ICERES="${OCNRES}" +# These are the currently recommended grid-combinations +case "${CASE}" in + "C48") + export waveGRD='uglo_100km' + ;; + "C96" | "C192") + export waveGRD='uglo_100km' + ;; + "C384") + export waveGRD='uglo_100km' + ;; + "C768" | "C1152") + export waveGRD='uglo_m1g16' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" 
+ exit 1 + ;; +esac + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) +export FHOUT_OCNICE=3 + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 +export FHMAX_GFS=120 +export FHOUT_GFS=3 +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +export FHOUT_OCNICE_GFS=6 +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=24 +# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." 
+ +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDISNOWDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=0 +export NMEM_ENS_GFS=0 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 + export IAUFHRS="6" +fi + +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi + +export GSI_SOILANAL=NO + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in DA job +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="YES" # Run fit to observations package +export DO_VRFY_OCEANDA="NO" # Run SOCA Ocean and Seaice DA verification tasks + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." 
+ exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +# The monitor jobs are not yet supported for JEDIATMVAR +if [[ ${DO_JEDIATMVAR} = "YES" ]]; then + export DO_VERFOZN="NO" # Ozone data assimilation monitoring + export DO_VERFRAD="NO" # Radiance data assimilation monitoring + export DO_VMINMON="NO" # GSI minimization monitoring +fi + +echo "END: config.base" diff --git a/FV3GFSwfm/jkh/config.catchem b/FV3GFSwfm/jkh/config.catchem new file mode 100644 index 0000000000..020c355235 --- /dev/null +++ b/FV3GFSwfm/jkh/config.catchem @@ -0,0 +1,53 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Turn off warnings about unused variables +# shellcheck disable=SC2034 + + +# Path to the input data tree +case ${machine} in + "HERA") + AERO_INPUTS_DIR="/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop" + ;; + "ORION" | "HERCULES") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine ${machine} unsupported for aerosols" + exit 2 + ;; +esac +export AERO_INPUTS_DIR + +export AERO_DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table.catchem" +export AERO_FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table.catchem" +# Biomass burning emission dataset. 
Choose from: gbbepx, qfed, none +export EMITYPE=2 # 1: MODIS, 2: GBBEPx +export EMIYEAR=2019 # 2014: CEDS, 2019: CEDS + + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +export fscav_aero="'*:0.2','so2:0.1','msa:0.1','dms:0.1','sulf:0.4','bc1:0.3','bc2:0.3','oc1:0.1','oc2:0.1','seas1:0.45','seas2:0.45','seas3:0.45','seas4:0.45','seas5:0.45','dust1:0.3','dust2:0.3','dust3:0.3','dust4:0.3','dust5:0.3'" +export dust_opt_cplchp="${dust_opt_cplchp:-5}" +export dust_alpha_catc="${dust_alpha_catc:-0.04}" +export dust_gamma_catc="${dust_gamma_catc:-1.0}" +export seas_emis_scale="${seas_emis_scale:-"1.,1.,1.,1.,1."}" +# Large scale wet deposition option +export wetdep_ls_cplchp="${wetdep_ls_cplchp:-0}" + +# +# Number of diagnostic aerosol tracers (default: 0) +export dnats_aero=0 diff --git a/FV3GFSwfm/jkh/config.cleanup b/FV3GFSwfm/jkh/config.cleanup new file mode 100644 index 0000000000..1908c91bb5 --- /dev/null +++ b/FV3GFSwfm/jkh/config.cleanup @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.cleanup ########## +echo "BEGIN: config.cleanup" + +# Get task specific resources +source "${EXPDIR}/config.resources" cleanup + +export CLEANUP_COM="YES" # NO=retain ROTDIR. YES default in cleanup.sh + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +# Specify the list of files to exclude from the first stage of cleanup +# Because arrays cannot be exported, list is a single string of comma- +# separated values. This string is split to form an array at runtime. 
+case ${RUN} in + gdas | gfs) exclude_string="*prepbufr*, *cnvstat*, *atmanl.nc" ;; + enkf*) exclude_string="*f006.ens*" ;; + *) exclude_string="" ;; +esac +export exclude_string + +echo "END: config.cleanup" \ No newline at end of file diff --git a/FV3GFSwfm/jkh/config.com b/FV3GFSwfm/jkh/config.com new file mode 100644 index 0000000000..2f99e709ea --- /dev/null +++ b/FV3GFSwfm/jkh/config.com @@ -0,0 +1,97 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_ROTDIR_cycled()` +# must correspondingly be updated to match. 
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_CONF_TMPL=${COM_BASE}'/conf' +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_SNOW_ANALYSIS_TMPL=${COM_BASE}'/analysis/snow' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2' +declare -rx COM_ATMOS_GRIB_GRID_TMPL=${COM_ATMOS_GRIB_TMPL}'/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_OZNMON_TMPL=${COM_BASE}'/products/atmos/oznmon' +declare -rx COM_ATMOS_RADMON_TMPL=${COM_BASE}'/products/atmos/radmon' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx 
COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' +declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' +declare -rx COM_ICE_NETCDF_TMPL=${COM_BASE}'/products/ice/netcdf' +declare -rx COM_ICE_GRIB_TMPL=${COM_BASE}'/products/ice/grib2' +declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/jkh/config.earc b/FV3GFSwfm/jkh/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/jkh/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/jkh/config.ecen b/FV3GFSwfm/jkh/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/jkh/config.echgres b/FV3GFSwfm/jkh/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/jkh/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/jkh/config.ediag b/FV3GFSwfm/jkh/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/jkh/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/jkh/config.efcs b/FV3GFSwfm/jkh/config.efcs new file mode 100644 index 0000000000..0cbc6d0830 --- /dev/null +++ b/FV3GFSwfm/jkh/config.efcs @@ -0,0 +1,102 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# Turn off components in ensemble +# export DO_AERO="NO" +# export DO_OCN="NO" +# export DO_ICE="NO" +export DO_WAVE="NO" + +export CASE="${CASE_ENS}" + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +# Ocean/Ice/Waves ensemble configurations are identical to deterministic member +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +[[ "${DO_CATChem}" == "YES" ]] && string="${string} --catchem" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + +# Get task specific resources +. "${EXPDIR}/config.resources" efcs + +# nggps_diag_nml +export FHOUT=${FHOUT_ENKF:-3} +if [[ ${RUN} == "enkfgfs" ]]; then + export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT}} +fi + +# model_configure +export FHMIN=${FHMIN_ENKF:-3} +export FHMAX=${FHMAX_ENKF:-9} +if [[ ${RUN} == "enkfgfs" ]]; then + export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX}} +fi + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [[ "${QUILTING}" == ".true." 
]] && [[ "${OUTPUT_GRID}" == "gaussian_grid" ]]; then + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" +else + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da_orig" +fi + +# Model config option for Ensemble +# export TYPE=nh # choices: nh, hydro +# export MONO=non-mono # choices: mono, non-mono + +# gfs_physics_nml +export FHSWR=3600. +export FHLWR=3600. +export IEMS=1 +export ISOL=2 +export ICO2=2 +export dspheat=".true." +export shal_cnv=".true." +export FHZER=6 + +# Set PREFIX_ATMINC to r when recentering on +if [[ ${RECENTER_ENKF:-"YES"} == "YES" ]]; then + export PREFIX_ATMINC="r" +fi + +# For IAU, write restarts at beginning of window also +if [[ "${DOIAU_ENKF:-}" == "YES" ]]; then + export restart_interval="3" +else + export restart_interval="6" +fi + +echo "END: config.efcs" diff --git a/FV3GFSwfm/jkh/config.eobs b/FV3GFSwfm/jkh/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/jkh/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. 
+ +echo "END: config.eobs" diff --git a/FV3GFSwfm/jkh/config.epos b/FV3GFSwfm/jkh/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/jkh/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/jkh/config.esfc b/FV3GFSwfm/jkh/config.esfc new file mode 100644 index 0000000000..684dea4ee3 --- /dev/null +++ b/FV3GFSwfm/jkh/config.esfc @@ -0,0 +1,30 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [[ ${DOIAU_ENKF} = "YES" ]]; then + export DOSFCANL_ENKF="NO" +fi + +# Turn off NST in JEDIATMENS +if [[ "${DO_JEDIATMENS}" == "YES" ]]; then + export DONST="NO" +fi + +# set up soil analysis +if [[ ${GSI_SOILANAL} = "YES" ]]; then + export DO_LNDINC=".true." + export LND_SOI_FILE="lnd_incr" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/jkh/config.eupd b/FV3GFSwfm/jkh/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/jkh/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. 
$EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/jkh/config.fcst b/FV3GFSwfm/jkh/config.fcst new file mode 100644 index 0000000000..9b11fe4fef --- /dev/null +++ b/FV3GFSwfm/jkh/config.fcst @@ -0,0 +1,295 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case ${WAVE_CDUMP} in + both | "${CDUMP/enkf}" ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +[[ "${DO_CATChem}" == "YES" ]] && string="${string} --catchem" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + +# Forecast length for GFS forecast +case ${RUN} in + *gfs) + # shellcheck disable=SC2153 + export FHMAX=${FHMAX_GFS} + # shellcheck disable=SC2153 + export FHOUT=${FHOUT_GFS} + export FHMAX_HF=${FHMAX_HF_GFS} + export FHOUT_HF=${FHOUT_HF_GFS} + export FHOUT_OCNICE=${FHOUT_OCNICE_GFS} + ;; + *gdas) + export FHMAX_HF=0 + export FHOUT_HF=0 + ;; + *) + echo "FATAL ERROR: Unsupported RUN '${RUN}'" + exit 1 +esac + +# Get task specific resources +source "${EXPDIR}/config.resources" fcst +export domains_stack_size="16000000" + + +if [[ "${DONST}" == "YES" ]]; then + source "${EXPDIR}/config.nsst" +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### + +export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" +#export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. 
while this is worked on +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if (( gwd_opt == 1 )); then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if (( gwd_opt == 2 )); then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=1 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".false." + export do_ugwp_v1=".true." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".true." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".true." + export do_ugwp_v1_orog_only=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level + if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then + export cdmbgwd=${cdmbgwd_gsl} + fi +fi + +# Sponge layer settings +export tau=0. +export rf_cutoff=10. 
+export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +if [[ $CCPP_SUITE = "FV3_GFS_v17_p8_ugwpv1_catchem" ]] || [[ $CCPP_SUITE = "FV3_GFS_v17_coupled_p8_ugwpv1_catchem" ]] ; then + export IAER=2011 ; #spectral band mapping method for aerosol optical properties from online chemical model +else + export IAER=1011 ; #spectral band mapping method for aerosol optical properties from MERRA-2 data +fi +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
+ +case ${imp_physics} in + 99) # ZhaoCarr + export ncld=1 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + ;; + 6) # WSM6 + export ncld=2 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export nwat=6 + ;; + 8) # Thompson + export ncld=2 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".true." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + ;; + 11) # GFDL + export ncld=5 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + ;; + *) echo "Unknown microphysics option, ABORT!" ;; +esac + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="ufs.frac" +if [[ "${FRAC_GRID:-".true."}" == ".false." 
]]; then + export cplmode="ufs.nfrac" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- +if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" + + if [[ "${DOIAU}" == "YES" ]]; then + export restart_interval="3" + else + export restart_interval="6" + fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval=${restart_interval_gfs:-12} + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "${CASE}" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/jkh/config.fit2obs b/FV3GFSwfm/jkh/config.fit2obs new file mode 100644 index 0000000000..9b3fb87ead --- /dev/null +++ b/FV3GFSwfm/jkh/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" fit2obs + +export PRVT=${FIXgfs}/gsi/prepobs_errtable.global +export HYBLEVS=${FIXgfs}/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/jkh/config.gempak b/FV3GFSwfm/jkh/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/jkh/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/jkh/config.genesis b/FV3GFSwfm/jkh/config.genesis new file mode 100644 index 0000000000..62a1bf88c0 --- /dev/null +++ b/FV3GFSwfm/jkh/config.genesis @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis ########## +echo "BEGIN: config.genesis" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.genesis" diff --git a/FV3GFSwfm/jkh/config.genesis_fsu b/FV3GFSwfm/jkh/config.genesis_fsu new file mode 100644 index 0000000000..13948592c4 --- /dev/null +++ b/FV3GFSwfm/jkh/config.genesis_fsu @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis_fsu ########## +echo "BEGIN: config.genesis_fsu" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis_fsu + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.genesis_fsu" diff --git a/FV3GFSwfm/jkh/config.ice b/FV3GFSwfm/jkh/config.ice new file mode 100644 index 0000000000..055bd1e2bb --- /dev/null +++ b/FV3GFSwfm/jkh/config.ice @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +# Override atm-only FV3 settings when ice model is on +export min_seaice="1.0e-6" +export use_cice_alb=".true." + +export MESH_ICE="mesh.mx${ICERES}.nc" + +export CICE_GRID="grid_cice_NEMS_mx${ICERES}.nc" +export CICE_MASK="kmtu_cice_NEMS_mx${ICERES}.nc" + +echo "END: config.ice" diff --git a/FV3GFSwfm/jkh/config.metp b/FV3GFSwfm/jkh/config.metp new file mode 100644 index 0000000000..8260d1c472 --- /dev/null +++ b/FV3GFSwfm/jkh/config.metp @@ -0,0 +1,100 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export model_dir=${ARCDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export 
g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/jkh/config.mos b/FV3GFSwfm/jkh/config.mos new file mode 100644 index 0000000000..a74c7e7d21 --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.mos ########## +echo "BEGIN: config.mos" + +# MOS package location +export HOMEgfs_mos=/lfs/h1/ops/prod/packages/gfs_mos.v${mos_ver} + +echo "END: config.mos" diff --git a/FV3GFSwfm/jkh/config.mos_ext_grd_fcst b/FV3GFSwfm/jkh/config.mos_ext_grd_fcst new file mode 100644 index 0000000000..db94af945f --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_ext_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_fcst ########## +echo "BEGIN: config.mos_ext_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_fcst + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_fcst" diff --git a/FV3GFSwfm/jkh/config.mos_ext_grd_prdgen b/FV3GFSwfm/jkh/config.mos_ext_grd_prdgen new file mode 100644 index 0000000000..ade31b0c1a --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_ext_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prdgen ########## +echo "BEGIN: config.mos_ext_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prdgen" diff --git a/FV3GFSwfm/jkh/config.mos_ext_grd_prep b/FV3GFSwfm/jkh/config.mos_ext_grd_prep new file mode 100644 index 0000000000..0ba14e2573 --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_ext_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prep ########## +echo "BEGIN: config.mos_ext_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prep" diff --git a/FV3GFSwfm/jkh/config.mos_ext_stn_fcst b/FV3GFSwfm/jkh/config.mos_ext_stn_fcst new file mode 100644 index 0000000000..5b26d196f9 --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_ext_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_fcst ########## +echo "BEGIN: config.mos_ext_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_fcst" diff --git a/FV3GFSwfm/jkh/config.mos_ext_stn_prdgen b/FV3GFSwfm/jkh/config.mos_ext_stn_prdgen new file mode 100644 index 0000000000..9f63eb56fd --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_ext_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prdgen ########## +echo "BEGIN: config.mos_ext_stn_prdgen" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" mos_ext_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prdgen" diff --git a/FV3GFSwfm/jkh/config.mos_ext_stn_prep b/FV3GFSwfm/jkh/config.mos_ext_stn_prep new file mode 100644 index 0000000000..c443503f11 --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_ext_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prep ########## +echo "BEGIN: config.mos_ext_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prep" diff --git a/FV3GFSwfm/jkh/config.mos_grd_fcst b/FV3GFSwfm/jkh/config.mos_grd_fcst new file mode 100644 index 0000000000..bd0d50a04d --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_fcst ########## +echo "BEGIN: config.mos_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_fcst" diff --git a/FV3GFSwfm/jkh/config.mos_grd_prdgen b/FV3GFSwfm/jkh/config.mos_grd_prdgen new file mode 100644 index 0000000000..dd9ce8bcd8 --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prdgen ########## +echo "BEGIN: config.mos_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prdgen" diff --git a/FV3GFSwfm/jkh/config.mos_grd_prep b/FV3GFSwfm/jkh/config.mos_grd_prep new file mode 100644 index 0000000000..8a3d334d0d --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prep ########## +echo "BEGIN: config.mos_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prep + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prep" diff --git a/FV3GFSwfm/jkh/config.mos_stn_fcst b/FV3GFSwfm/jkh/config.mos_stn_fcst new file mode 100644 index 0000000000..7cb266ea3a --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_fcst ########## +echo "BEGIN: config.mos_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_fcst" diff --git a/FV3GFSwfm/jkh/config.mos_stn_prdgen b/FV3GFSwfm/jkh/config.mos_stn_prdgen new file mode 100644 index 0000000000..f92edbd0fd --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prdgen ########## +echo "BEGIN: config.mos_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prdgen" diff --git a/FV3GFSwfm/jkh/config.mos_stn_prep b/FV3GFSwfm/jkh/config.mos_stn_prep new file mode 100644 index 0000000000..b236f42879 --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prep ########## +echo "BEGIN: config.mos_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prep" diff --git a/FV3GFSwfm/jkh/config.mos_wx_ext_prdgen b/FV3GFSwfm/jkh/config.mos_wx_ext_prdgen new file mode 100644 index 0000000000..054cb950ad --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_wx_ext_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_ext_prdgen ########## +echo "BEGIN: config.mos_wx_ext_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_ext_prdgen + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_wx_ext_prdgen" diff --git a/FV3GFSwfm/jkh/config.mos_wx_prdgen b/FV3GFSwfm/jkh/config.mos_wx_prdgen new file mode 100644 index 0000000000..d4481b65fc --- /dev/null +++ b/FV3GFSwfm/jkh/config.mos_wx_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_prdgen ########## +echo "BEGIN: config.mos_wx_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_wx_prdgen" diff --git a/FV3GFSwfm/jkh/config.npoess b/FV3GFSwfm/jkh/config.npoess new file mode 100644 index 0000000000..9a388d2e6b --- /dev/null +++ b/FV3GFSwfm/jkh/config.npoess @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.npoess ########## +# GFS NPOESS step specific + +echo "BEGIN: config.npoess" + +# Get task specific resources +. "${EXPDIR}/config.resources" npoess + +echo "END: config.npoess" diff --git a/FV3GFSwfm/jkh/config.nsst b/FV3GFSwfm/jkh/config.nsst new file mode 100644 index 0000000000..7bda81f058 --- /dev/null +++ b/FV3GFSwfm/jkh/config.nsst @@ -0,0 +1,39 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# Set NST_MODEL for JEDIATMVAR or JEDIATMENS +if [[ "${DO_JEDIATMVAR}" == "YES" || "${DO_JEDIATMENS}" == "YES" ]]; then + export NST_MODEL=1 +fi + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +cdate="${PDY}${cyc}" +if (( cdate < 2017072000 )); then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if (( NST_GSI > 0 )); then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/jkh/config.oceanice_products b/FV3GFSwfm/jkh/config.oceanice_products new file mode 100644 index 0000000000..9e5c5b1c68 --- /dev/null +++ b/FV3GFSwfm/jkh/config.oceanice_products @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.oceanice_products ########## + +echo "BEGIN: config.oceanice_products" + +# Get task specific resources +source "${EXPDIR}/config.resources" oceanice_products + +export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products.yaml" + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.oceanice_products" diff --git a/FV3GFSwfm/jkh/config.ocn b/FV3GFSwfm/jkh/config.ocn new file mode 100644 index 0000000000..317a76e58a --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocn @@ -0,0 +1,29 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +export MESH_OCN="mesh.mx${OCNRES}.nc" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi + +# Time interval for applying the increment +if [[ "${DOIAU}" == "YES" ]]; then + export ODA_INCUPD_NHOURS="6.0" +else + export ODA_INCUPD_NHOURS="3.0" +fi + + + +echo "END: config.ocn" diff --git a/FV3GFSwfm/jkh/config.ocnanal b/FV3GFSwfm/jkh/config.ocnanal new file mode 100644 index 0000000000..1294b429e9 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanal @@ -0,0 +1,26 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR="${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config" +export OBS_LIST=${PARMgfs}/gdas/soca/obs/obs_list.yaml +export OBS_YAML="${OBS_LIST}" +export FV3JEDI_STAGE_YAML="${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml" +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=100 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +export COMIN_OBS=@COMIN_OBS@ + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/jkh/config.ocnanalbmat b/FV3GFSwfm/jkh/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + 
+########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/jkh/config.ocnanalchkpt b/FV3GFSwfm/jkh/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/jkh/config.ocnanalecen b/FV3GFSwfm/jkh/config.ocnanalecen new file mode 100644 index 0000000000..b64c2bcf62 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanalecen @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalecen ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalecen" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalecen + +echo "END: config.ocnanalecen" diff --git a/FV3GFSwfm/jkh/config.ocnanalpost b/FV3GFSwfm/jkh/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/jkh/config.ocnanalprep b/FV3GFSwfm/jkh/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/jkh/config.ocnanalrun b/FV3GFSwfm/jkh/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/jkh/config.ocnanalvrfy b/FV3GFSwfm/jkh/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/jkh/config.postsnd b/FV3GFSwfm/jkh/config.postsnd new file mode 100644 index 0000000000..7ec0ad6321 --- /dev/null +++ b/FV3GFSwfm/jkh/config.postsnd @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/jkh/config.prep b/FV3GFSwfm/jkh/config.prep new file mode 100644 index 0000000000..6009280db0 --- /dev/null +++ b/FV3GFSwfm/jkh/config.prep @@ -0,0 +1,61 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +export TROPCYQCRELOSH="${SCRgfs}/exglobal_atmos_tropcy_qc_reloc.sh" + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=${FIXgfs}/gsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/jkh/config.prepatmiodaobs b/FV3GFSwfm/jkh/config.prepatmiodaobs new file mode 100644 index 0000000000..e29cf67b07 --- /dev/null +++ b/FV3GFSwfm/jkh/config.prepatmiodaobs @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.prepatmiodaobs ########## +# Atm Obs Prep specific + +echo "BEGIN: config.prepatmiodaobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" prepatmiodaobs + +echo "END: config.prepatmiodaobs" diff --git a/FV3GFSwfm/jkh/config.prepchem b/FV3GFSwfm/jkh/config.prepchem new file mode 100755 index 0000000000..cdb909e125 --- /dev/null +++ b/FV3GFSwfm/jkh/config.prepchem @@ -0,0 +1,18 @@ +#!/bin/ksh -x + +########## config.prepchem ########## +# PREPBUFR specific configuration + +echo "BEGIN: config.prepchem" + +# Get task specific resources +. 
$EXPDIR/config.resources prepchem + + +# Set prepchem variables + +# Set job and DATAROOT +export job=${CDUMP}_prep_${cyc} +export EMIDIR="/scratch1/BMC/gsd-fv3-dev/lzhang/emi_" +#export EMIINPUT=/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop/emi_C" +echo "END: config.prepchem" diff --git a/FV3GFSwfm/jkh/config.prepoceanobs b/FV3GFSwfm/jkh/config.prepoceanobs new file mode 100644 index 0000000000..977097acaf --- /dev/null +++ b/FV3GFSwfm/jkh/config.prepoceanobs @@ -0,0 +1,20 @@ +#!/bin/bash + +########## config.prepoceanobs ########## + +echo "BEGIN: config.prepoceanobs" + +export OCNOBS2IODAEXEC=${HOMEgfs}/sorc/gdas.cd/build/bin/gdas_obsprovider2ioda.x + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBSPREP_YAML=${PARMgfs}/gdas/soca/obsprep/obsprep_config.yaml +export OBS_LIST=${PARMgfs}/gdas/soca/obs/obs_list.yaml +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} + +# ocean analysis needs own dmpdir until standard dmpdir has full ocean obs +export DMPDIR=/scratch1/NCEPDEV/global/glopara/data/experimental_obs + +# Get task specific resources +. "${EXPDIR}/config.resources" prepoceanobs +echo "END: config.prepoceanobs" diff --git a/FV3GFSwfm/jkh/config.prepsnowobs b/FV3GFSwfm/jkh/config.prepsnowobs new file mode 100644 index 0000000000..60ca16ce9e --- /dev/null +++ b/FV3GFSwfm/jkh/config.prepsnowobs @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.prepsnowobs ########## +# Snow Obs Prep specific + +echo "BEGIN: config.prepsnowobs" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" prepsnowobs + +export GTS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_gts.yaml.j2" +export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" + +export BUFR2IODAX="${EXECgfs}/bufr2ioda.x" + +export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe" +export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" + +export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py" + +echo "END: config.prepsnowobs" diff --git a/FV3GFSwfm/jkh/config.resources b/FV3GFSwfm/jkh/config.resources new file mode 100644 index 0000000000..c405505498 --- /dev/null +++ b/FV3GFSwfm/jkh/config.resources @@ -0,0 +1,1213 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if (( $# != 1 )); then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "stage_ic aerosol_init" + echo "prep prepsnowobs prepatmiodaobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "snowanl" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst echgres" + echo "upp atmos_products" + echo "tracker genesis genesis_fsu" + echo "verfozn verfrad vminmon fit2obs metp arch cleanup" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic oceanice_products" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak npoess" + echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalecen ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +case ${machine} in + "WCOSS2") npe_node_max=128;; + "HERA") npe_node_max=40;; + "ORION") npe_node_max=40;; + "HERCULES") npe_node_max=80;; + "JET") + case ${PARTITION_BATCH} in + "xjet") npe_node_max=24;; + "vjet" | "sjet") 
npe_node_max=16;; + "kjet") npe_node_max=40;; + *) + echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}" + exit 3 + esac + ;; + "S4") + case ${PARTITION_BATCH} in + "s4") npe_node_max=32;; + "ivy") npe_node_max=20;; + *) + echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}" + exit 3 + esac + ;; + "AWSPW") + export PARTITION_BATCH="compute" + npe_node_max=40 + ;; + "CONTAINER") + npe_node_max=1 + ;; + *) + echo "FATAL ERROR: Unknown machine encountered by ${BASH_SOURCE[0]}" + exit 2 + ;; +esac +export npe_node_max + +case ${step} in + "prep") + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" == "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40GB" + fi + ;; + + "prepsnowobs") + export wtime_prepsnowobs="00:05:00" + export npe_prepsnowobs=1 + export nth_prepsnowobs=1 + export npe_node_prepsnowobs=1 + ;; + + "prepatmiodaobs") + export wtime_prepatmiodaobs="00:30:00" + export npe_prepatmiodaobs=1 + export nth_prepatmiodaobs=1 + export npe_node_prepatmiodaobs=$(( npe_node_max / nth_prepatmiodaobs )) + ;; + + "aerosol_init") + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + export npe_node_aerosol_init=$(( npe_node_max / nth_aerosol_init )) + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6GB" + ;; + + "prepchem") + export wtime_prepchem="00:25:00" + export npe_prepchem=2 + export nth_prepchem=1 + export npe_node_prepchem=$(( npe_node_max / nth_prepchem )) + export NTASKS=${npe_prepchem} + ;; + + "waveinit") + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + export npe_node_waveinit=$(( npe_node_max / nth_waveinit )) + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + ;; + + "waveprep") + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export 
nth_waveprep_gfs=1 + export npe_node_waveprep=$(( npe_node_max / nth_waveprep )) + export npe_node_waveprep_gfs=$(( npe_node_max / nth_waveprep_gfs )) + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + ;; + + "wavepostsbs") + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$(( npe_node_max / nth_wavepostsbs )) + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + ;; + + "wavepostbndpnt") + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt )) + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + ;; + + "wavepostbndpntbll") + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll )) + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + ;; + + "wavepostpnt") + export wtime_wavepostpnt="04:00:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt )) + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + ;; + + "wavegempak") + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$(( npe_node_max / nth_wavegempak )) + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + ;; + + "waveawipsbulls") + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(( npe_node_max / nth_waveawipsbulls )) + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + ;; + + "waveawipsgridded") + export 
wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(( npe_node_max / nth_waveawipsgridded )) + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + ;; + + "atmanlinit") + export layout_x=${layout_x_atmanl} + export layout_y=${layout_y_atmanl} + + export layout_gsib_x=$(( layout_x * 3 )) + export layout_gsib_y=$(( layout_y * 2 )) + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + export npe_node_atmanlinit=$(( npe_node_max / nth_atmanlinit )) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + ;; + + "atmanlrun") + export layout_x=${layout_x_atmanl} + export layout_y=${layout_y_atmanl} + + export wtime_atmanlrun="00:30:00" + export npe_atmanlrun=$(( layout_x * layout_y * 6 )) + export npe_atmanlrun_gfs=$(( layout_x * layout_y * 6 )) + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + export npe_node_atmanlrun=$(( npe_node_max / nth_atmanlrun )) + export memory_atmanlrun="96GB" + export is_exclusive=True + ;; + + "atmanlfinal") + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + export npe_node_atmanlfinal=$(( npe_node_max / nth_atmanlfinal )) + export is_exclusive=True + ;; + + "snowanl") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") + layout_x=6 + layout_y=6 + ;; + "C384") + layout_x=5 + layout_y=5 + ;; + "C192" | "C96" | "C48") + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + export wtime_snowanl="00:15:00" + export npe_snowanl=$(( layout_x * layout_y * 6 )) + export nth_snowanl=1 + export npe_node_snowanl=$(( npe_node_max / nth_snowanl )) + ;; + + "aeroanlinit") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") + layout_x=8 + layout_y=8 + ;; + "C384") + 
layout_x=8 + layout_y=8 + ;; + "C192" | "C96") + layout_x=8 + layout_y=8 + ;; + "C48" ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + export npe_node_aeroanlinit=$(( npe_node_max / nth_aeroanlinit )) + export memory_aeroanlinit="3072M" + ;; + + "aeroanlrun") + case ${CASE} in + "C768") + layout_x=8 + layout_y=8 + ;; + "C384") + layout_x=8 + layout_y=8 + ;; + "C192" | "C96") + layout_x=8 + layout_y=8 + ;; + "C48" ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + export npe_aeroanlrun=$(( layout_x * layout_y * 6 )) + export npe_aeroanlrun_gfs=$(( layout_x * layout_y * 6 )) + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + export npe_node_aeroanlrun=$(( npe_node_max / nth_aeroanlrun )) + export is_exclusive=True + ;; + + "aeroanlfinal") + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + export npe_node_aeroanlfinal=$(( npe_node_max / nth_aeroanlfinal )) + export memory_aeroanlfinal="3072M" + ;; + + "ocnanalprep") + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + export npe_node_ocnanalprep=$(( npe_node_max / nth_ocnanalprep )) + export memory_ocnanalprep="24GB" + ;; + + "prepoceanobs") + export wtime_prepoceanobs="00:10:00" + export npe_prepoceanobs=1 + export nth_prepoceanobs=1 + export npe_node_prepoceanobs=$(( npe_node_max / nth_prepoceanobs )) + export memory_prepoceanobs="48GB" + ;; + + "ocnanalbmat") + npes=16 + case ${OCNRES} in + "025") npes=480;; + "050") npes=16;; + "500") npes=16;; + *) + echo "FATAL ERROR: Resources 
not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + export npe_node_ocnanalbmat=$(( npe_node_max / nth_ocnanalbmat )) + ;; + + "ocnanalrun") + npes=16 + case ${OCNRES} in + "025") + npes=480 + memory_ocnanalrun="96GB" + ;; + "050") + npes=16 + memory_ocnanalrun="96GB" + ;; + "500") + npes=16 + memory_ocnanalrun="24GB" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + + export wtime_ocnanalrun="00:15:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + export npe_node_ocnanalrun=$(( npe_node_max / nth_ocnanalrun )) + export memory_ocnanalrun + ;; + + "ocnanalecen") + npes=16 + case ${OCNRES} in + "025") + npes=40 + memory_ocnanalecen="96GB" + ;; + "050") + npes=16 + memory_ocnanalecen="96GB" + ;; + "500") + npes=16 + memory_ocnanalecen="24GB" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + + export wtime_ocnanalecen="00:10:00" + export npe_ocnanalecen=${npes} + export nth_ocnanalecen=1 + export is_exclusive=True + export npe_node_ocnanalecen=$(( npe_node_max / nth_ocnanalecen )) + export memory_ocnanalecen + ;; + + "ocnanalchkpt") + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + export npe_node_ocnanalchkpt=$(( npe_node_max / nth_ocnanalchkpt )) + case ${OCNRES} in + "025") + memory_ocnanalchkpt="128GB" + npes=40;; + "050") + memory_ocnanalchkpt="32GB" + npes=16;; + "500") + memory_ocnanalchkpt="32GB" + npes=8;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + export npe_ocnanalchkpt=${npes} + export memory_ocnanalchkpt + ;; + + "ocnanalpost") + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + export 
npe_node_ocnanalpost=$(( npe_node_max / nth_ocnanalpost )) + ;; + + "ocnanalvrfy") + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + export npe_node_ocnanalvrfy=$(( npe_node_max / nth_ocnanalvrfy )) + export memory_ocnanalvrfy="24GB" + ;; + + "anal") + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" == "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + case ${CASE} in + "C384") + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + ;; + "C192" | "C96" | "C48") + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} == "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} == "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} == "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_node_anal=$(( npe_node_max / nth_anal )) + export nth_cycle=${nth_anal} + export npe_node_cycle=$(( npe_node_max / nth_cycle )) + export is_exclusive=True + ;; + + "analcalc") + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$(( npe_node_max / nth_analcalc )) + export is_exclusive=True + export memory_analcalc="48GB" + 
;; + + "analdiag") + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$(( npe_node_max / nth_analdiag )) + export memory_analdiag="48GB" + ;; + + "sfcanl") + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + export npe_node_sfcanl=$(( npe_node_max / nth_sfcanl )) + export is_exclusive=True + ;; + + "fcst" | "efcs") + export is_exclusive=True + + if [[ "${step}" == "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" == "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" == ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. 
+ # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${FV3PETS}} + (( "${MEDPETS}" > 300 )) && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + CHMPETS=0; CHMTHREADS=0 + if [[ "${DO_AERO}" == "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + export CHMPETS CHMTHREADS + + CHMPETS=0; CHMTHREADS=0 + if [[ "${DO_CATChem}" == "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + echo "CATChem using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + export CHMPETS CHMTHREADS + + WAVPETS=0; WAVTHREADS=0 + if [[ "${DO_WAVE}" == "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + export WAVPETS WAVTHREADS + + OCNPETS=0; OCNTHREADS=0 + if [[ "${DO_OCN}" == "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + export OCNPETS OCNTHREADS + + ICEPETS=0; ICETHREADS=0 + if [[ "${DO_ICE}" == "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + 
ICEPETS )) + fi + export ICEPETS ICETHREADS + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:15:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + "C768" | "C1152") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + ;; + + "oceanice_products") + export wtime_oceanice_products="00:15:00" + export npe_oceanice_products=1 + export npe_node_oceanice_products=1 + export nth_oceanice_products=1 + export memory_oceanice_products="96GB" + ;; + + "upp") + case "${CASE}" in + "C48" | "C96") + export npe_upp=${CASE:1} + ;; + "C192" | "C384" | "C768") + export npe_upp=120 + export memory_upp="48GB" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_node_upp=${npe_upp} + + export nth_upp=1 + + export wtime_upp="00:15:00" + if (( npe_node_upp > npe_node_max )); then + export npe_node_upp=${npe_node_max} + fi + export is_exclusive=True + ;; + + "atmos_products") + export wtime_atmos_products="00:15:00" + export npe_atmos_products=24 + export nth_atmos_products=1 + export npe_node_atmos_products="${npe_atmos_products}" + export wtime_atmos_products_gfs="${wtime_atmos_products}" + export 
npe_atmos_products_gfs="${npe_atmos_products}" + export nth_atmos_products_gfs="${nth_atmos_products}" + export npe_node_atmos_products_gfs="${npe_node_atmos_products}" + export is_exclusive=True + ;; + + "verfozn") + export wtime_verfozn="00:05:00" + export npe_verfozn=1 + export nth_verfozn=1 + export npe_node_verfozn=1 + export memory_verfozn="1G" + ;; + + "verfrad") + export wtime_verfrad="00:40:00" + export npe_verfrad=1 + export nth_verfrad=1 + export npe_node_verfrad=1 + export memory_verfrad="5G" + ;; + + "vminmon") + export wtime_vminmon="00:05:00" + export npe_vminmon=1 + export nth_vminmon=1 + export npe_node_vminmon=1 + export wtime_vminmon_gfs="00:05:00" + export npe_vminmon_gfs=1 + export nth_vminmon_gfs=1 + export npe_node_vminmon_gfs=1 + export memory_vminmon="1G" + ;; + + "tracker") + export wtime_tracker="00:10:00" + export npe_tracker=1 + export nth_tracker=1 + export npe_node_tracker=1 + export memory_tracker="4G" + ;; + + "genesis") + export wtime_genesis="00:25:00" + export npe_genesis=1 + export nth_genesis=1 + export npe_node_genesis=1 + export memory_genesis="10G" + ;; + + "genesis_fsu") + export wtime_genesis_fsu="00:10:00" + export npe_genesis_fsu=1 + export nth_genesis_fsu=1 + export npe_node_genesis_fsu=1 + export memory_genesis_fsu="10G" + ;; + + "fit2obs") + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + ;; + + "metp") + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + ;; + + "echgres") + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" == "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + ;; + + "init") + export 
wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70GB" + ;; + + "init_chem") + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + ;; + + "mom6ic") + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + ;; + + "arch" | "earc" | "getic") + declare -x "wtime_${step}"="06:00:00" + declare -x "npe_${step}"="1" + declare -x "npe_node_${step}"="1" + declare -x "nth_${step}"="1" + declare -x "memory_${step}"="4096M" + if [[ "${machine}" == "WCOSS2" ]]; then + declare -x "memory_${step}"="50GB" + fi + ;; + + "cleanup") + export wtime_cleanup="00:15:00" + export npe_cleanup=1 + export npe_node_cleanup=1 + export nth_cleanup=1 + export memory_cleanup="4096M" + ;; + + "stage_ic") + export wtime_stage_ic="00:15:00" + export npe_stage_ic=1 + export npe_node_stage_ic=1 + export nth_stage_ic=1 + export is_exclusive=True + ;; + + "atmensanlinit") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + export npe_node_atmensanlinit=$(( npe_node_max / nth_atmensanlinit )) + export memory_atmensanlinit="3072M" + ;; + + "atmensanlrun") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + export wtime_atmensanlrun="00:30:00" + export npe_atmensanlrun=$(( layout_x * layout_y * 6 )) + export npe_atmensanlrun_gfs=$(( layout_x * layout_y * 6 )) + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + export npe_node_atmensanlrun=$(( npe_node_max / nth_atmensanlrun )) + export memory_atmensanlrun="96GB" + export is_exclusive=True + ;; + + "atmensanlfinal") + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + export npe_node_atmensanlfinal=$(( npe_node_max / 
nth_atmensanlfinal )) + export is_exclusive=True + ;; + + "eobs" | "eomg") + export wtime_eobs="00:15:00" + export wtime_eomg="00:30:00" + case ${CASE} in + "C768") export npe_eobs=200;; + "C384") export npe_eobs=100;; + "C192" | "C96" | "C48") export npe_eobs=40;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + export npe_node_eobs=$(( npe_node_max / nth_eobs )) + export is_exclusive=True + # The number of tasks and cores used must be the same for eobs + # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details + # For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + elif [[ ${machine} = "HERCULES" ]]; then + # For Hercules, this is only an issue at C384; use 20 tasks/node + if [[ ${CASE} = "C384" ]]; then + export npe_node_eobs=20 + fi + fi + export npe_node_eomg=${npe_node_eobs} + ;; + + "ediag") + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$(( npe_node_max / nth_ediag )) + export memory_ediag="30GB" + ;; + + "eupd") + export wtime_eupd="00:30:00" + case ${CASE} in + "C768") + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" == "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + ;; + "C384") + export npe_eupd=270 + export nth_eupd=8 + if [[ "${machine}" == "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ ${machine} == "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + ;; + "C192" | "C96" | "C48") + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" == "HERA" || "${machine}" == "JET" ]]; then + export nth_eupd=4 + fi + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_node_eupd=$(( npe_node_max / nth_eupd )) + export is_exclusive=True + ;; + + "ecen") 
+ export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" == "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} == "C384" || ${CASE} == "C192" || ${CASE} == "C96" || ${CASE} == "C48" ]]; then + export nth_ecen=2 + fi + export npe_node_ecen=$(( npe_node_max / nth_ecen )) + export nth_cycle=${nth_ecen} + export npe_node_cycle=$(( npe_node_max / nth_cycle )) + export is_exclusive=True + ;; + + "esfc") + export wtime_esfc="00:08:00" + export npe_esfc=80 + export nth_esfc=1 + export npe_node_esfc=$(( npe_node_max / nth_esfc )) + export nth_cycle=${nth_esfc} + export npe_node_cycle=$(( npe_node_max / nth_cycle )) + export memory_esfc="80GB" + ;; + + "epos") + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=1 + export npe_node_epos=$(( npe_node_max / nth_epos )) + export is_exclusive=True + ;; + + "postsnd") + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(( npe_node_postsnd * nth_postsnd )) + if (( postsnd_req_cores > npe_node_max )); then + export npe_node_postsnd=$(( npe_node_max / nth_postsnd )) + fi + export is_exclusive=True + ;; + + "awips") + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + ;; + + "npoess") + export wtime_npoess="03:30:00" + export npe_npoess=1 + export npe_node_npoess=1 + export nth_npoess=1 + export memory_npoess="3GB" + ;; + + "gempak") + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + ;; + + "mos_stn_prep") + export wtime_mos_stn_prep="00:10:00" + export npe_mos_stn_prep=3 + export npe_node_mos_stn_prep=3 + export nth_mos_stn_prep=1 + export memory_mos_stn_prep="5GB" + export 
NTASK="${npe_mos_stn_prep}" + export PTILE="${npe_node_mos_stn_prep}" + ;; + + "mos_grd_prep") + export wtime_mos_grd_prep="00:10:00" + export npe_mos_grd_prep=4 + export npe_node_mos_grd_prep=4 + export nth_mos_grd_prep=1 + export memory_mos_grd_prep="16GB" + export NTASK="${npe_mos_grd_prep}" + export PTILE="${npe_node_mos_grd_prep}" + ;; + + "mos_ext_stn_prep") + export wtime_mos_ext_stn_prep="00:15:00" + export npe_mos_ext_stn_prep=2 + export npe_node_mos_ext_stn_prep=2 + export nth_mos_ext_stn_prep=1 + export memory_mos_ext_stn_prep="5GB" + export NTASK="${npe_mos_ext_stn_prep}" + export PTILE="${npe_node_mos_ext_stn_prep}" + ;; + + "mos_ext_grd_prep") + export wtime_mos_ext_grd_prep="00:10:00" + export npe_mos_ext_grd_prep=7 + export npe_node_mos_ext_grd_prep=7 + export nth_mos_ext_grd_prep=1 + export memory_mos_ext_grd_prep="3GB" + export NTASK="${npe_mos_ext_grd_prep}" + export PTILE="${npe_node_mos_ext_grd_prep}" + ;; + + "mos_stn_fcst") + export wtime_mos_stn_fcst="00:10:00" + export npe_mos_stn_fcst=5 + export npe_node_mos_stn_fcst=5 + export nth_mos_stn_fcst=1 + export memory_mos_stn_fcst="40GB" + export NTASK="${npe_mos_stn_fcst}" + export PTILE="${npe_node_mos_stn_fcst}" + ;; + + "mos_grd_fcst") + export wtime_mos_grd_fcst="00:10:00" + export npe_mos_grd_fcst=7 + export npe_node_mos_grd_fcst=7 + export nth_mos_grd_fcst=1 + export memory_mos_grd_fcst="50GB" + export NTASK="${npe_mos_grd_fcst}" + export PTILE="${npe_node_mos_grd_fcst}" + ;; + + "mos_ext_stn_fcst") + export wtime_mos_ext_stn_fcst="00:20:00" + export npe_mos_ext_stn_fcst=3 + export npe_node_mos_ext_stn_fcst=3 + export nth_mos_ext_stn_fcst=1 + export memory_mos_ext_stn_fcst="50GB" + export NTASK="${npe_mos_ext_stn_fcst}" + export PTILE="${npe_node_mos_ext_stn_fcst}" + export prepost=True + ;; + + "mos_ext_grd_fcst") + export wtime_mos_ext_grd_fcst="00:10:00" + export npe_mos_ext_grd_fcst=7 + export npe_node_mos_ext_grd_fcst=7 + export nth_mos_ext_grd_fcst=1 + export 
memory_mos_ext_grd_fcst="50GB" + export NTASK="${npe_mos_ext_grd_fcst}" + export PTILE="${npe_node_mos_ext_grd_fcst}" + ;; + + "mos_stn_prdgen") + export wtime_mos_stn_prdgen="00:10:00" + export npe_mos_stn_prdgen=1 + export npe_node_mos_stn_prdgen=1 + export nth_mos_stn_prdgen=1 + export memory_mos_stn_prdgen="15GB" + export NTASK="${npe_mos_stn_prdgen}" + export PTILE="${npe_node_mos_stn_prdgen}" + export prepost=True + ;; + + "mos_grd_prdgen") + export wtime_mos_grd_prdgen="00:40:00" + export npe_mos_grd_prdgen=72 + export npe_node_mos_grd_prdgen=18 + export nth_mos_grd_prdgen=4 + export memory_mos_grd_prdgen="20GB" + export NTASK="${npe_mos_grd_prdgen}" + export PTILE="${npe_node_mos_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_grd_prdgen}" + ;; + + "mos_ext_stn_prdgen") + export wtime_mos_ext_stn_prdgen="00:10:00" + export npe_mos_ext_stn_prdgen=1 + export npe_node_mos_ext_stn_prdgen=1 + export nth_mos_ext_stn_prdgen=1 + export memory_mos_ext_stn_prdgen="15GB" + export NTASK="${npe_mos_ext_stn_prdgen}" + export PTILE="${npe_node_mos_ext_stn_prdgen}" + export prepost=True + ;; + + "mos_ext_grd_prdgen") + export wtime_mos_ext_grd_prdgen="00:30:00" + export npe_mos_ext_grd_prdgen=96 + export npe_node_mos_ext_grd_prdgen=6 + export nth_mos_ext_grd_prdgen=16 + export memory_mos_ext_grd_prdgen="30GB" + export NTASK="${npe_mos_ext_grd_prdgen}" + export PTILE="${npe_node_mos_ext_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_ext_grd_prdgen}" + ;; + + "mos_wx_prdgen") + export wtime_mos_wx_prdgen="00:10:00" + export npe_mos_wx_prdgen=4 + export npe_node_mos_wx_prdgen=2 + export nth_mos_wx_prdgen=2 + export memory_mos_wx_prdgen="10GB" + export NTASK="${npe_mos_wx_prdgen}" + export PTILE="${npe_node_mos_wx_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_prdgen}" + ;; + + "mos_wx_ext_prdgen") + export wtime_mos_wx_ext_prdgen="00:10:00" + export npe_mos_wx_ext_prdgen=4 + export npe_node_mos_wx_ext_prdgen=2 + export nth_mos_wx_ext_prdgen=2 + export 
memory_mos_wx_ext_prdgen="10GB" + export NTASK="${npe_mos_wx_ext_prdgen}" + export PTILE="${npe_node_mos_wx_ext_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_ext_prdgen}" + ;; + + *) + echo "FATAL ERROR: Invalid job ${step} passed to ${BASH_SOURCE[0]}" + exit 1 + ;; + +esac + +echo "END: config.resources" diff --git a/FV3GFSwfm/jkh/config.sfcanl b/FV3GFSwfm/jkh/config.sfcanl new file mode 100644 index 0000000000..e2fde8992a --- /dev/null +++ b/FV3GFSwfm/jkh/config.sfcanl @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +# Turn off NST in JEDIATMVAR +if [[ "${DO_JEDIATMVAR}" == "YES" ]]; then + export DONST="NO" +fi + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/jkh/config.snowanl b/FV3GFSwfm/jkh/config.snowanl new file mode 100644 index 0000000000..980036cf7b --- /dev/null +++ b/FV3GFSwfm/jkh/config.snowanl @@ -0,0 +1,30 @@ +#! /usr/bin/env bash + +########## config.snowanl ########## +# configuration common to snow analysis tasks + +echo "BEGIN: config.snowanl" + +# Get task specific resources +source "${EXPDIR}/config.resources" snowanl + +export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" + +# Name of the JEDI executable and its yaml template +export JEDIEXE="${EXECgfs}/fv3jedi_letkf.x" +export JEDIYAML="${PARMgfs}/gdas/snow/letkfoi/letkfoi.yaml.j2" + +# Ensemble member properties +export SNOWDEPTHVAR="snodl" +export BESTDDEV="30." # Background Error Std. Dev. 
for LETKFOI + +# Name of the executable that applies increment to bkg and its namelist template +export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" +export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" + +export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" + +export io_layout_x=1 +export io_layout_y=1 + +echo "END: config.snowanl" diff --git a/FV3GFSwfm/jkh/config.stage_ic b/FV3GFSwfm/jkh/config.stage_ic new file mode 100644 index 0000000000..63d0e4a5cf --- /dev/null +++ b/FV3GFSwfm/jkh/config.stage_ic @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +########## config.stage_ic ########## + +echo "BEGIN: config.stage_ic" + +# Get task specific resources +source "${EXPDIR}/config.resources" stage_ic + +case "${CASE}" in + "C48" | "C96" | "C192") + export CPL_ATMIC="workflow_${CASE}_refactored" + export CPL_ICEIC="workflow_${CASE}_refactored" + export CPL_OCNIC="workflow_${CASE}_refactored" + export CPL_WAVIC="workflow_${CASE}_refactored" + ;; + "C384") + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c_refactored + export CPL_ICEIC=CPC_refactored + export CPL_OCNIC=CPC3Dvar_refactored + export CPL_WAVIC=workflow_C384_refactored + ;; + "C768") + export CPL_ATMIC=HR3C768 + export CPL_ICEIC=HR3marine + export CPL_OCNIC=HR3marine + export CPL_WAVIC=HR3marine + ;; + "C1152") + export CPL_ATMIC=HR3C1152 + export CPL_ICEIC=HR3marine + export CPL_OCNIC=HR3marine + export CPL_WAVIC=HR3marine + ;; + *) + echo "FATAL ERROR Unrecognized resolution: ${CASE}" + exit 1 + ;; +esac + +echo "END: config.stage_ic" diff --git a/FV3GFSwfm/jkh/config.tracker b/FV3GFSwfm/jkh/config.tracker new file mode 100644 index 0000000000..71fcf9196d --- /dev/null +++ b/FV3GFSwfm/jkh/config.tracker @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.tracker ########## +echo "BEGIN: config.tracker" + +# Get task specific resources +. "${EXPDIR}/config.resources" tracker + +# Get tropcy settings +. 
"${EXPDIR}/config.tropcy" + +echo "END: config.tracker" diff --git a/FV3GFSwfm/jkh/config.tropcy b/FV3GFSwfm/jkh/config.tropcy new file mode 100644 index 0000000000..718abe3be5 --- /dev/null +++ b/FV3GFSwfm/jkh/config.tropcy @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.tropcy ########## +echo "BEGIN: config.tropcy" + +# Tracker/genesis package location +export HOMEens_tracker=${BASE_GIT}/TC_tracker/${ens_tracker_ver} + +export SENDCOM="YES" # Needed by tracker scripts still + +export FHOUT_CYCLONE=6 +FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) +export FHMAX_CYCLONE + +echo "END: config.tropcy" diff --git a/FV3GFSwfm/jkh/config.ufs b/FV3GFSwfm/jkh/config.ufs new file mode 100644 index 0000000000..635868dcf8 --- /dev/null +++ b/FV3GFSwfm/jkh/config.ufs @@ -0,0 +1,512 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if (( $# <= 1 )); then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" + echo "--gocart" + echo "--catchem" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_gocart=true +skip_catchem=true +skip_mediator=true + +# Loop through named arguments +while (( $# > 0 )); do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + shift + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + shift + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + shift + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + shift + ;; + "--gocart") + skip_gocart=false + shift + ;; + "--catchem") + skip_catchem=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, 
ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=2 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 
#Note this should be 10 for WCOSS2 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
+ exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplchp=".false." +export cplwav=".false." +export cplwav2atm=".false." +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1" +model_list="atm" + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export cpl=".true." + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8? +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + source "${EXPDIR}/config.ocn" + export cplflx=".true." 
+ model_list="${model_list}.ocean" + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.1deg.nc" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="2.5e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' + ;; + "050") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + 
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export TOPOEDITS + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING + export eps_imesh + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + source "${EXPDIR}/config.ice" + export cplice=".true." + model_list="${model_list}.ice" + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + source "${EXPDIR}/config.wave" + export cplwav=".true." + export cplwav2atm=".true." 
+ model_list="${model_list}.wave" + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "glo_200") + ntasks_ww3=30 + nthreads_ww3=1 + ;; + "glo_500") + ntasks_ww3=12 + nthreads_ww3=1 + ;; + "mx025") + ntasks_ww3=80 + ;; + "uglo_100km") + ntasks_ww3=40 + nthreads_ww3=1 + ;; + "uglo_m1g16") + ntasks_ww3=1000 + nthreads_ww3=1 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +# GOCART specific settings +if [[ "${skip_gocart}" == "false" ]]; then + source "${EXPDIR}/config.aero" + export cplchm=".true." + model_list="${model_list}.aero" +fi + +# CATChem specific settings +if [[ "${skip_catchem}" == "false" ]]; then + source "${EXPDIR}/config.catchem" + export cplchp=".true." + export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_catchem" + export GBDAY=1 +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1_catchem" + export GBDAY=35 +fi + + model_list="${model_list}.catchem" +fi + + +# Set the name of the UFS (previously nems) configure template to use +# Default ufs.configure templates for supported model configurations +case "${model_list}" in + atm) + default_template="${PARMgfs}/ufs/ufs.configure.atm.IN" + ;; + atm.aero) + default_template="${PARMgfs}/ufs/ufs.configure.atmaero.IN" + ;; + atm.catchem) + default_template="${HOMEgfs}/parm/ufs/ufs.configure.atmcatchem.IN" + ;; + atm.wave) + default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav.IN" + ;; + atm.ocean.ice) + default_template="${PARMgfs}/ufs/ufs.configure.s2s_esmf.IN" + ;; + atm.ocean.ice.aero) + default_template="${PARMgfs}/ufs/ufs.configure.s2sa_esmf.IN" + ;; + atm.ocean.ice.catchem) + default_template="${HOMEgfs}/parm/ufs/ufs.configure.s2scatchem_esmf.IN" + ;; + atm.ocean.ice.wave) + 
default_template="${PARMgfs}/ufs/ufs.configure.s2sw_esmf.IN" + ;; + atm.ocean.ice.wave.aero) + default_template="${PARMgfs}/ufs/ufs.configure.s2swa_esmf.IN" + ;; + atm.ocean.ice.wave.catchem) + default_template="${HOMEgfs}/parm/ufs/ufs.configure.s2swcatchem_esmf.IN" + ;; + *) + echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" + exit 16 + ;; +esac + +# Allow user to override the default template +export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}} +unset model_list default_template + +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." + exit 17 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/jkh/config.upp b/FV3GFSwfm/jkh/config.upp new file mode 100644 index 0000000000..41015c2fee --- /dev/null +++ b/FV3GFSwfm/jkh/config.upp @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.upp ########## +# UPP specific + +echo "BEGIN: config.upp" + +# Get task specific resources +. "${EXPDIR}/config.resources" upp + +export UPP_CONFIG="${PARMgfs}/post/upp.yaml" + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.upp" diff --git a/FV3GFSwfm/jkh/config.verfozn b/FV3GFSwfm/jkh/config.verfozn new file mode 100644 index 0000000000..df7d18012d --- /dev/null +++ b/FV3GFSwfm/jkh/config.verfozn @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.verfozn ########## +echo "BEGIN: config.verfozn" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfozn + +export DO_DATA_RPT=1 +export OZN_AREA="glb" +export OZNMON_SUFFIX=${NET} +export SATYPE_FILE=${PARMgfs}/monitor/gdas_oznmon_satype.txt + +# Source the parm file +. 
"${PARMgfs}/monitor/gdas_oznmon.parm" + +# Set up validation file +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + export ozn_val_file=${PARMgfs}/monitor/gdas_oznmon_base.tar +fi + +echo "END: config.verfozn" diff --git a/FV3GFSwfm/jkh/config.verfrad b/FV3GFSwfm/jkh/config.verfrad new file mode 100644 index 0000000000..506ce50b4f --- /dev/null +++ b/FV3GFSwfm/jkh/config.verfrad @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +########## config.verfrad ########## +echo "BEGIN: config.verfrad" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfrad + +export satype_file=${PARMgfs}/monitor/gdas_radmon_satype.txt + +# Source the parm file +. "${PARMgfs}/monitor/da_mon.parm" + +# Other variables +export RAD_AREA="glb" +export MAKE_CTL=1 +export MAKE_DATA=1 +export USE_ANL=1 +export DO_DIAG_RPT=1 +export DO_DATA_RPT=1 + +export RADMON_SUFFIX=${RUN} +export CYCLE_INTERVAL=${assim_freq:-6} +export VERBOSE="YES" + +echo "END: config.verfrad" diff --git a/FV3GFSwfm/jkh/config.vminmon b/FV3GFSwfm/jkh/config.vminmon new file mode 100644 index 0000000000..7c7d362161 --- /dev/null +++ b/FV3GFSwfm/jkh/config.vminmon @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.vminmon ########## +echo "BEGIN: config.vminmon" + +# Get task specific resources +. "${EXPDIR}/config.resources" vminmon + +export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} +export CYCLE_INTERVAL=${assim_freq:-6} + +export mm_gnormfile=${PARMgfs}/monitor/${RUN}_minmon_gnorm.txt +export mm_costfile=${PARMgfs}/monitor/${RUN}_minmon_cost.txt + +echo "END: config.vminmon" diff --git a/FV3GFSwfm/jkh/config.wave b/FV3GFSwfm/jkh/config.wave new file mode 100644 index 0000000000..6fbce69996 --- /dev/null +++ b/FV3GFSwfm/jkh/config.wave @@ -0,0 +1,207 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' 
+ export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD='' + export wavepostGRD='gwes_30m' + export waveuoutpGRD=${waveGRD} + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_025' + export waveuoutpGRD=${waveGRD} + ;; + "glo_200") + #Global regular lat/lon 2deg deg grid + export waveinterpGRD='' + export wavepostGRD='glo_200' + export waveuoutpGRD=${waveGRD} + ;; + "glo_500") + #Global regular lat/lon 5deg deg grid + export waveinterpGRD='' + export wavepostGRD='glo_500' + export waveuoutpGRD=${waveGRD} + ;; + "uglo_100km") + #unstructured 100km grid + export waveinterpGRD='glo_200' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + "uglo_m1g16") + #unstructured m1v16 grid + export waveinterpGRD='glo_15mxt' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + *) + echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting." 
+ exit 1 + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [[ "${CDUMP}" = "gdas" ]]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=${FHMAX_GFS} +fi +export WAVHINDH=0 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 )) +export DTPNT_WAV=3600 +export FHINCP_WAV=$(( DTPNT_WAV / 3600 )) + +# Selected output parameters (gridded) +export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" + +# Restart file config +if [[ "${CDUMP}" = "gdas" ]]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=48 # RTOFS forecasts only out to 8 days +elif [[ ${gfs_cyc} -ne 0 ]]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [[ "${CDUMP}" != gfs ]]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) # TODO: This calculation needs to move to parsing_namelists_WW3.sh + if [[ ${rst_dt_gfs} -gt 0 ]]; then + export DT_1_RST_WAV=0 #${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + #temporarily set to zero to avoid a clash in requested restart times + #which makes the wave model crash a fix for the model 
issue will be coming + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [[ ${RUNMEM} = -1 ]]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB="${RUNMEM: -2}" +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ ${DO_ICE} == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ ${DO_OCN} == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in 
two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/jkh/config.waveawipsbulls b/FV3GFSwfm/jkh/config.waveawipsbulls new file mode 100644 index 0000000000..65a8d5076b --- /dev/null +++ b/FV3GFSwfm/jkh/config.waveawipsbulls @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/jkh/config.waveawipsgridded b/FV3GFSwfm/jkh/config.waveawipsgridded new file mode 100644 index 0000000000..bd7c7c11e4 --- /dev/null +++ b/FV3GFSwfm/jkh/config.waveawipsgridded @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/jkh/config.wavegempak b/FV3GFSwfm/jkh/config.wavegempak new file mode 100644 index 0000000000..bcbec91f07 --- /dev/null +++ b/FV3GFSwfm/jkh/config.wavegempak @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/jkh/config.waveinit b/FV3GFSwfm/jkh/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/jkh/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. 
$EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/jkh/config.wavepostbndpnt b/FV3GFSwfm/jkh/config.wavepostbndpnt new file mode 100644 index 0000000000..412c5fb42a --- /dev/null +++ b/FV3GFSwfm/jkh/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/jkh/config.wavepostbndpntbll b/FV3GFSwfm/jkh/config.wavepostbndpntbll new file mode 100644 index 0000000000..6695ab0f84 --- /dev/null +++ b/FV3GFSwfm/jkh/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/jkh/config.wavepostpnt b/FV3GFSwfm/jkh/config.wavepostpnt new file mode 100644 index 0000000000..e87237da82 --- /dev/null +++ b/FV3GFSwfm/jkh/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/jkh/config.wavepostsbs b/FV3GFSwfm/jkh/config.wavepostsbs new file mode 100644 index 0000000000..b3c5902e3c --- /dev/null +++ b/FV3GFSwfm/jkh/config.wavepostsbs @@ -0,0 +1,28 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -n "${waveinterpGRD}" ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/jkh/config.waveprep b/FV3GFSwfm/jkh/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/jkh/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. 
$EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/jkh/jkh.crontab b/FV3GFSwfm/jkh/jkh.crontab new file mode 100644 index 0000000000..275080feff --- /dev/null +++ b/FV3GFSwfm/jkh/jkh.crontab @@ -0,0 +1,5 @@ + +#################### jkh #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.6/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh/jkh.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh/jkh.xml +################################################################# diff --git a/FV3GFSwfm/jkh/jkh.xml b/FV3GFSwfm/jkh/jkh.xml new file mode 100644 index 0000000000..4d97787732 --- /dev/null +++ b/FV3GFSwfm/jkh/jkh.xml @@ -0,0 +1,326 @@ + + + + + + +]> + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh/logs/@Y@m@d@H.log + + + 202403200000 202403200000 24:00:00 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/stage_ic.sh + + jkh_gfsstage_ic_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfsstage_ic.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + 
DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + + + + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_ctrl.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile1.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile2.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile3.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile4.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile5.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile6.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile1.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile2.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile3.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile4.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile5.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile6.nc + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/fcst.sh + + jkh_gfsfcst_@H + gsd-fv3 + batch + hera + 06:00:00 + 32:ppn=40:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + 
EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + + + + + + + + + + + + _f000-f006 _f009-f015 _f018-f024 _f027-f033 _f036-f042 _f045-f051 _f054-f060 _f063-f069 _f072-f078 _f081-f087 _f090-f096 _f099-f105 _f108-f114 _f117-f120 + f006 f015 f024 f033 f042 f051 f060 f069 f078 f087 f096 f105 f114 f120 + f000_f003_f006 f009_f012_f015 f018_f021_f024 f027_f030_f033 f036_f039_f042 f045_f048_f051 f054_f057_f060 f063_f066_f069 f072_f075_f078 f081_f084_f087 f090_f093_f096 f099_f102_f105 f108_f111_f114 f117_f120 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/atmos_products.sh + + jkh_gfsatmos_prod#grp#_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=24:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfsatmos_prod#grp#.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + FHRLST#lst# + COMPONENTatmos + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#dep# + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/tracker.sh + + jkh_gfstracker_@H + gsd-fv3 + batch + hera + 00:10:00 + 1:ppn=1:tpp=1 + 4G + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfstracker.log + + RUN_ENVIRemc + 
HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/genesis.sh + + jkh_gfsgenesis_@H + gsd-fv3 + batch + hera + 00:25:00 + 1:ppn=1:tpp=1 + 10G + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfsgenesis.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + + + + + + + + + + g2g1 g2o1 pcp1 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/metp.sh + + jkh_gfsmetp#metpcase#_@H + gsd-fv3 + batch + hera + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + SDATE_GFS2024032000 + METPCASE#metpcase# + + + + + + + + + + + + + + 
/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/arch.sh + + jkh_gfsarch_@H + gsd-fv3 + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + + + + + + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/cleanup.sh + + jkh_gfscleanup_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/jkh/logs/@Y@m@d@H/gfscleanup.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/jkh + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/RUNDIRS/jkh + + + + + + + + + + diff --git a/FV3GFSwfm/test_catchem/CAT_C96_CCPP.xml b/FV3GFSwfm/test_catchem/CAT_C96_CCPP.xml new file mode 100644 index 0000000000..512bc4ad86 --- /dev/null +++ b/FV3GFSwfm/test_catchem/CAT_C96_CCPP.xml @@ -0,0 +1,638 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1:ppn=1:tpp=1"> + + + + + + + 1:ppn=1:tpp=1"> + + + + + + 1:ppn=40:tpp=1"> + + + + + + + 1:ppn=40:tpp=1"> + + + + + + 2:ppn=40:tpp=1"> + + + + + + 1:ppn=2"> + + + + + + 5:ppn=40:tpp=1"> + + + + + + 10:ppn=12:tpp=1"> + + + + + + 
1:ppn=40:tpp=1"> + + + + + + 7:ppn=40:tpp=1"> + + + + + + 7:ppn=40:tpp=1"> + + + + + + 7:ppn=40:tpp=1"> + + + + + + 1:ppn=40:tpp=1"> + + + + + + 1:ppn=40:tpp=1"> + + + + + + 1:ppn=40:tpp=1"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + 2:ppn=1:tpp=1"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + 20:ppn=2:tpp=1"> + + + + + + 5:ppn=4:tpp=3"> + + + + + + 1:ppn=4:tpp=2"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + + 1:ppn=4:tpp=1"> + + + + + + 1:ppn=1:tpp=1"> + + + + + + + 1:ppn=1:tpp=1"> + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + &SDATE; &EDATE; &INTERVAL; + + + + /scratch2/BMC/gsd-fv3-dev/lzhang/UFS-dev/global-workflow/jobs/rocoto/stage_ic.sh + + CAT_C96_CCPP_gfsstage_ic_@H + &ACCOUNT; + &QUEUE_INIT_GFS; + &PARTITION_INIT_GFS; + &RESOURCES_INIT_GFS; + &WALLTIME_INIT_GFS; + &NATIVE_INIT_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsstage_ic.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NET&CDUMP; + CDUMP&CDUMP; + RUN&CDUMP; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/Kate.Zhang/fv3gfs/comrot + DATAROOT/scratch1/NCEPDEV/stmp2/Kate.Zhang/RUNDIRS/CAT_C96_CCPP + + + + + + &JOBS_DIR;/aerosol_init.sh + + &PSLOT;_gfsaerosol_init_@H + &ACCOUNT; + &QUEUE_AEROSOL_INIT_GFS; + &PARTITION_AEROSOL_INIT_GFS; + &RESOURCES_AEROSOL_INIT_GFS; + &WALLTIME_AEROSOL_INIT_GFS; + &MEMORY_AEROSOL_INIT_GFS; + &NATIVE_AEROSOL_INIT_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsaerosol_init.log + + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NET&CDUMP; + CDUMP&CDUMP; + RUN&CDUMP; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/Kate.Zhang/fv3gfs/comrot + DATAROOT/scratch1/NCEPDEV/stmp2/Kate.Zhang/RUNDIRS/CAT_C96_CCPP + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/gfs_ctrl.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/gfs_data.tile1.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/gfs_data.tile2.nc + 
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/gfs_data.tile3.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/gfs_data.tile4.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/gfs_data.tile5.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/gfs_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_core.res.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_core.res.tile1.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_core.res.tile2.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_core.res.tile3.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_core.res.tile4.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_core.res.tile5.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_core.res.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_tracer.res.tile1.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_tracer.res.tile2.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_tracer.res.tile3.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_tracer.res.tile4.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_tracer.res.tile5.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.fv_tracer.res.tile6.nc + + + + + + + + &JOBS_DIR;/prepchem.sh + + &PSLOT;_prepchem_@H + &ACCOUNT; + &QUEUE_PREP_CHEM_SRC; + &PARTITION_PREP_CHEM_SRC; + &RESOURCES_PREP_CHEM_SRC; + &WALLTIME_PREP_CHEM_SRC; + &NATIVE_PREP_CHEM_SRC; + &ROTDIR;/logs/@Y@m@d@H/prepchem.log + + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NET&CDUMP; + CDUMP&CDUMP; + RUN&CDUMP; + CDATE@Y@m@d@H + CDUMP&CDUMP; + PDY@Y@m@d + SYEAR@Y + SMONTH@m + SDAY@d + SHOUR@H + cyc@H + + + + + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + &ACCOUNT; + &QUEUE_FCST_GFS; + &PARTITION_FCST_GFS; + &RESOURCES_FCST_GFS; + &WALLTIME_FCST_GFS; + + 
&NATIVE_FCST_GFS; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIR&RUN_ENVIR; + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NET&CDUMP; + CDUMP&CDUMP; + RUN&CDUMP; + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/Kate.Zhang/fv3gfs/comrot + DATAROOT/scratch1/NCEPDEV/stmp2/Kate.Zhang/RUNDIRS/CAT_C96_CCPP + + + + + + + + + + + + + + + + diff --git a/FV3GFSwfm/test_catchem/config.aero b/FV3GFSwfm/test_catchem/config.aero new file mode 100644 index 0000000000..c152fafd12 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.aero @@ -0,0 +1,46 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Turn off warnings about unused variables +# shellcheck disable=SC2034 + + +# Path to the input data tree +case ${machine} in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION" | "HERCULES") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine ${machine} unsupported for aerosols" + exit 2 + ;; +esac +export AERO_INPUTS_DIR + +export AERO_DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table.aero" +export AERO_FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table.aero" +# Biomass burning emission dataset. Choose from: gbbepx, qfed, none +export AERO_EMIS_FIRE="qfed" +# Directory containing GOCART configuration files +export AERO_CONFIG_DIR="${PARMgfs}/ufs/gocart" + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +export fscav_aero="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +export dnats_aero=2 diff --git a/FV3GFSwfm/test_catchem/config.aeroanl b/FV3GFSwfm/test_catchem/config.aeroanl new file mode 100644 index 0000000000..c1752745ae --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.aeroanl @@ -0,0 +1,31 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" +export STATICB_TYPE='identity' +export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" +export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" +export BERROR_DATE="20160630.000000" + +export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE="${EXECgfs}/fv3jedi_var.x" + +if [[ "${DOIAU}" == "YES" ]]; then + export aero_bkg_times="3,6,9" + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2" +else + export aero_bkg_times="6" + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2" +fi + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/test_catchem/config.aeroanlfinal b/FV3GFSwfm/test_catchem/config.aeroanlfinal new file mode 100644 index 0000000000..34e5d8f116 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific 
resources +source "${EXPDIR}/config.resources" aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/test_catchem/config.aeroanlinit b/FV3GFSwfm/test_catchem/config.aeroanlinit new file mode 100644 index 0000000000..7036d3d27b --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/test_catchem/config.aeroanlrun b/FV3GFSwfm/test_catchem/config.aeroanlrun new file mode 100644 index 0000000000..012e5b79f3 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +source "${EXPDIR}/config.resources" aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/test_catchem/config.aerosol_init b/FV3GFSwfm/test_catchem/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/test_catchem/config.anal b/FV3GFSwfm/test_catchem/config.anal new file mode 100644 index 0000000000..09aaa15a98 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.anal @@ -0,0 +1,153 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . 
${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgfs}/gsi/global_convinfo.txt +export OZINFO=${FIXgfs}/gsi/global_ozinfo.txt +export SATINFO=${FIXgfs}/gsi/global_satinfo.txt +export OBERROR=${FIXgfs}/gsi/prepobs_errtable.global + +if [[ ${GSI_SOILANAL} = "YES" ]]; then + export hofx_2m_sfcfile=".true." + export reducedgrid=".false." # not possible for sfc analysis, Jeff Whitaker says it's not useful anyway + export paranc=".false." # temporary until sfc io coded for parance (PR being prepared by T. 
Gichamo) + export CONVINFO=${FIXgfs}/gsi/global_convinfo_2mObs.txt + export ANAVINFO=${FIXgfs}/gsi/global_anavinfo_soilanal.l127.txt +fi + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt 
"2021091612" ]]; then + export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgfs}/gsi/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. 
Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/test_catchem/config.analcalc b/FV3GFSwfm/test_catchem/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/test_catchem/config.analdiag b/FV3GFSwfm/test_catchem/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/test_catchem/config.arch b/FV3GFSwfm/test_catchem/config.arch new file mode 100644 index 0000000000..a23bcce6ae --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.arch @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} + +echo "END: config.arch" diff --git a/FV3GFSwfm/test_catchem/config.atmanl b/FV3GFSwfm/test_catchem/config.atmanl new file mode 100644 index 0000000000..88d47cbdb2 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmanl @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/gdas_prototype_3d.yaml.j2" +export JEDIYAML="${PARMgfs}/gdas/atm/variational/3dvar_drpcg.yaml.j2" +export STATICB_TYPE="gsibec" +export INTERP_METHOD='barycentric' + +if [[ ${DOHYBVAR} = "YES" ]]; then + # shellcheck disable=SC2153 + export CASE_ANL=${CASE_ENS} + export BERROR_YAML="${PARMgfs}/gdas/atm/berror/hybvar_${STATICB_TYPE}.yaml.j2" +else + export CASE_ANL=${CASE} + export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2" +fi + +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + +export layout_x_atmanl=8 +export layout_y_atmanl=8 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${EXECgfs}/fv3jedi_var.x + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/test_catchem/config.atmanlfinal b/FV3GFSwfm/test_catchem/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/test_catchem/config.atmanlinit b/FV3GFSwfm/test_catchem/config.atmanlinit new file mode 100644 index 0000000000..1aec88bcc2 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmanlinit @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit + +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/test_catchem/config.atmanlrun b/FV3GFSwfm/test_catchem/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/test_catchem/config.atmensanl b/FV3GFSwfm/test_catchem/config.atmensanl new file mode 100644 index 0000000000..edc5a0f8df --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmensanl @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2" +export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2" +export INTERP_METHOD='barycentric' + +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + +export layout_x_atmensanl=8 +export layout_y_atmensanl=8 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${EXECgfs}/fv3jedi_letkf.x + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/test_catchem/config.atmensanlfinal b/FV3GFSwfm/test_catchem/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/test_catchem/config.atmensanlinit b/FV3GFSwfm/test_catchem/config.atmensanlinit new file mode 100644 index 0000000000..0eee2ffa82 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmensanlinit @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit + +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/test_catchem/config.atmensanlrun b/FV3GFSwfm/test_catchem/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmensanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/test_catchem/config.atmos_products b/FV3GFSwfm/test_catchem/config.atmos_products new file mode 100644 index 0000000000..451f5eff86 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.atmos_products @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +########## config.atmos_products ########## +# atmosphere grib2 products specific + +echo "BEGIN: config.atmos_products" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmos_products + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +# Scripts used by this job +export INTERP_ATMOS_MASTERSH="${USHgfs}/interp_atmos_master.sh" +export INTERP_ATMOS_SFLUXSH="${USHgfs}/interp_atmos_sflux.sh" + +if [[ "${RUN:-}" == "gdas" ]]; then + export downset=1 + export FHOUT_PGBS=${FHOUT:-1} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="NO" # Create interpolated sflux.1p00 file + export WGNE="NO" # WGNE products are created for first FHMAX_WGNE forecast hours + export FHMAX_WGNE=0 +elif [[ "${RUN:-}" == "gfs" ]]; then + export downset=2 + export FHOUT_PGBS=${FHOUT_GFS:-3} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="YES" # Create interpolated sflux.1p00 file + export WGNE="YES" # WGNE products are created for first FHMAX_WGNE forecast hours + export FHMAX_WGNE=180 +fi + +export APCP_MSG="597" # Message number for APCP in GFSv16. 
Look for TODO in exglobal_atmos_products.sh + +# paramlist files for the different forecast hours and downsets +export paramlista="${PARMgfs}/product/gfs.fFFF.paramlist.a.txt" +export paramlista_anl="${PARMgfs}/product/gfs.anl.paramlist.a.txt" +export paramlista_f000="${PARMgfs}/product/gfs.f000.paramlist.a.txt" +export paramlistb="${PARMgfs}/product/gfs.fFFF.paramlist.b.txt" + +echo "END: config.atmos_products" diff --git a/FV3GFSwfm/test_catchem/config.awips b/FV3GFSwfm/test_catchem/config.awips new file mode 100644 index 0000000000..61f0dc5652 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.awips @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. "${EXPDIR}/config.resources" awips + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/test_catchem/config.base b/FV3GFSwfm/test_catchem/config.base new file mode 100644 index 0000000000..2deec734e5 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.base @@ -0,0 +1,396 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +export HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march +export EXECgfs="${HOMEgfs}/exec" +export FIXgfs="${HOMEgfs}/fix" +export PARMgfs="${HOMEgfs}/parm" +export SCRgfs="${HOMEgfs}/scripts" +export USHgfs="${HOMEgfs}/ush" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun" # TODO: set via prod_envir in Ops +export COMINsyn="/scratch1/NCEPDEV/global/glopara/com/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" +export BASE_CPLIC="/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${HOMEgfs}/FV3GFSrun" +export PTMP="${HOMEgfs}/FV3GFSrun" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_GOES="NO" # GOES products +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export DO_NPOESS="NO" # NPOESS products +export DO_TRACKER="YES" # Hurricane track verification +export DO_GENESIS="YES" # Cyclone genesis verification +export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2024032000 +export 
EDATE=2024032000 +export EXP_WARM_START=".false." +export assim_freq=24 ## JKH +export PSLOT="test_catchem" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" +export ICSDIR="/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127" +#JKHexport ICSORG="/scratch1/BMC/gsd-fv3/exp/UFS-CAMsuite_dev1/FV3GFSrun/c96l64ics_v15" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? 
+ +# Get all the COM path templates +source "${EXPDIR}/config.com" + +# shellcheck disable=SC2016 +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export DO_CATChem="YES" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? + +# Resolution specific parameters +export LEVS=128 +export CASE="C384" +export CASE_ENS="@CASEENS@" +export OCNRES="025" +export ICERES="${OCNRES}" +# These are the currently recommended grid-combinations +case "${CASE}" in + "C48") + export waveGRD='uglo_100km' + ;; + "C96" | "C192") + export waveGRD='uglo_100km' + ;; + "C384") + export waveGRD='uglo_100km' + ;; + "C768" | "C1152") + export waveGRD='uglo_m1g16' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" 
+ exit 1 + ;; +esac + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) +export FHOUT_OCNICE=3 + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 +export FHMAX_GFS=120 +export FHOUT_GFS=3 +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +export FHOUT_OCNICE_GFS=6 +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=24 +# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." 
+ +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDISNOWDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=0 +export NMEM_ENS_GFS=0 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 + export IAUFHRS="6" +fi + +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi + +export GSI_SOILANAL=NO + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# script defaults to binary diagnostic files. Set diagnostic file +# variables here since used in DA job +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="YES" # Run fit to observations package +export DO_VRFY_OCEANDA="NO" # Run SOCA Ocean and Seaice DA verification tasks + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." 
+ exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +# The monitor jobs are not yet supported for JEDIATMVAR +if [[ ${DO_JEDIATMVAR} = "YES" ]]; then + export DO_VERFOZN="NO" # Ozone data assimilation monitoring + export DO_VERFRAD="NO" # Radiance data assimilation monitoring + export DO_VMINMON="NO" # GSI minimization monitoring +fi + +echo "END: config.base" diff --git a/FV3GFSwfm/test_catchem/config.catchem b/FV3GFSwfm/test_catchem/config.catchem new file mode 100644 index 0000000000..020c355235 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.catchem @@ -0,0 +1,53 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Turn off warnings about unused variables +# shellcheck disable=SC2034 + + +# Path to the input data tree +case ${machine} in + "HERA") + AERO_INPUTS_DIR="/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop" + ;; + "ORION" | "HERCULES") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine ${machine} unsupported for aerosols" + exit 2 + ;; +esac +export AERO_INPUTS_DIR + +export AERO_DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table.catchem" +export AERO_FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table.catchem" +# Biomass burning emission dataset. 
Choose from: gbbepx, qfed, none +export EMITYPE=2 # 1: MODIS, 2: GBBEPx +export EMIYEAR=2019 # 2014: CEDS, 2019: CEDS + + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +export fscav_aero="'*:0.2','so2:0.1','msa:0.1','dms:0.1','sulf:0.4','bc1:0.3','bc2:0.3','oc1:0.1','oc2:0.1','seas1:0.45','seas2:0.45','seas3:0.45','seas4:0.45','seas5:0.45','dust1:0.3','dust2:0.3','dust3:0.3','dust4:0.3','dust5:0.3'" +export dust_opt_cplchp="${dust_opt_cplchp:-5}" +export dust_alpha_catc="${dust_alpha_catc:-0.04}" +export dust_gamma_catc="${dust_gamma_catc:-1.0}" +export seas_emis_scale="${seas_emis_scale:-"1.,1.,1.,1.,1."}" +# Large scale wet deposition option +export wetdep_ls_cplchp="${wetdep_ls_cplchp:-0}" + +# +# Number of diagnostic aerosol tracers (default: 0) +export dnats_aero=0 diff --git a/FV3GFSwfm/test_catchem/config.cleanup b/FV3GFSwfm/test_catchem/config.cleanup new file mode 100644 index 0000000000..1908c91bb5 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.cleanup @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.cleanup ########## +echo "BEGIN: config.cleanup" + +# Get task specific resources +source "${EXPDIR}/config.resources" cleanup + +export CLEANUP_COM="YES" # NO=retain ROTDIR. YES default in cleanup.sh + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +# Specify the list of files to exclude from the first stage of cleanup +# Because arrays cannot be exported, list is a single string of comma- +# separated values. This string is split to form an array at runtime. 
+case ${RUN} in + gdas | gfs) exclude_string="*prepbufr*, *cnvstat*, *atmanl.nc" ;; + enkf*) exclude_string="*f006.ens*" ;; + *) exclude_string="" ;; +esac +export exclude_string + +echo "END: config.cleanup" \ No newline at end of file diff --git a/FV3GFSwfm/test_catchem/config.com b/FV3GFSwfm/test_catchem/config.com new file mode 100644 index 0000000000..2f99e709ea --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.com @@ -0,0 +1,97 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_ROTDIR_cycled()` +# must correspondingly be updated to match. 
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_CONF_TMPL=${COM_BASE}'/conf' +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_SNOW_ANALYSIS_TMPL=${COM_BASE}'/analysis/snow' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2' +declare -rx COM_ATMOS_GRIB_GRID_TMPL=${COM_ATMOS_GRIB_TMPL}'/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_OZNMON_TMPL=${COM_BASE}'/products/atmos/oznmon' +declare -rx COM_ATMOS_RADMON_TMPL=${COM_BASE}'/products/atmos/radmon' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx 
COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' +declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' +declare -rx COM_ICE_NETCDF_TMPL=${COM_BASE}'/products/ice/netcdf' +declare -rx COM_ICE_GRIB_TMPL=${COM_BASE}'/products/ice/grib2' +declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/test_catchem/config.earc b/FV3GFSwfm/test_catchem/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/test_catchem/config.ecen b/FV3GFSwfm/test_catchem/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/test_catchem/config.echgres b/FV3GFSwfm/test_catchem/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/test_catchem/config.ediag b/FV3GFSwfm/test_catchem/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. 
$EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/test_catchem/config.efcs b/FV3GFSwfm/test_catchem/config.efcs new file mode 100644 index 0000000000..0cbc6d0830 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.efcs @@ -0,0 +1,102 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# Turn off components in ensemble +# export DO_AERO="NO" +# export DO_OCN="NO" +# export DO_ICE="NO" +export DO_WAVE="NO" + +export CASE="${CASE_ENS}" + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +# Ocean/Ice/Waves ensemble configurations are identical to deterministic member +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +[[ "${DO_CATChem}" == "YES" ]] && string="${string} --catchem" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + +# Get task specific resources +. "${EXPDIR}/config.resources" efcs + +# nggps_diag_nml +export FHOUT=${FHOUT_ENKF:-3} +if [[ ${RUN} == "enkfgfs" ]]; then + export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT}} +fi + +# model_configure +export FHMIN=${FHMIN_ENKF:-3} +export FHMAX=${FHMAX_ENKF:-9} +if [[ ${RUN} == "enkfgfs" ]]; then + export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX}} +fi + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. 
+export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [[ "${QUILTING}" == ".true." ]] && [[ "${OUTPUT_GRID}" == "gaussian_grid" ]]; then + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" +else + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da_orig" +fi + +# Model config option for Ensemble +# export TYPE=nh # choices: nh, hydro +# export MONO=non-mono # choices: mono, non-mono + +# gfs_physics_nml +export FHSWR=3600. +export FHLWR=3600. +export IEMS=1 +export ISOL=2 +export ICO2=2 +export dspheat=".true." +export shal_cnv=".true." +export FHZER=6 + +# Set PREFIX_ATMINC to r when recentering on +if [[ ${RECENTER_ENKF:-"YES"} == "YES" ]]; then + export PREFIX_ATMINC="r" +fi + +# For IAU, write restarts at beginning of window also +if [[ "${DOIAU_ENKF:-}" == "YES" ]]; then + export restart_interval="3" +else + export restart_interval="6" +fi + +echo "END: config.efcs" diff --git a/FV3GFSwfm/test_catchem/config.eobs b/FV3GFSwfm/test_catchem/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/test_catchem/config.epos b/FV3GFSwfm/test_catchem/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/test_catchem/config.esfc b/FV3GFSwfm/test_catchem/config.esfc new file mode 100644 index 0000000000..684dea4ee3 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.esfc @@ -0,0 +1,30 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [[ ${DOIAU_ENKF} = "YES" ]]; then + export DOSFCANL_ENKF="NO" +fi + +# Turn off NST in JEDIATMENS +if [[ "${DO_JEDIATMENS}" == "YES" ]]; then + export DONST="NO" +fi + +# set up soil analysis +if [[ ${GSI_SOILANAL} = "YES" ]]; then + export DO_LNDINC=".true." + export LND_SOI_FILE="lnd_incr" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/test_catchem/config.eupd b/FV3GFSwfm/test_catchem/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. 
+ +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/test_catchem/config.fcst b/FV3GFSwfm/test_catchem/config.fcst new file mode 100644 index 0000000000..9b11fe4fef --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.fcst @@ -0,0 +1,295 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case ${WAVE_CDUMP} in + both | "${CDUMP/enkf}" ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +[[ "${DO_CATChem}" == "YES" ]] && string="${string} --catchem" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + +# Forecast length for GFS forecast +case ${RUN} in + *gfs) + # shellcheck disable=SC2153 + export FHMAX=${FHMAX_GFS} + # shellcheck disable=SC2153 + export FHOUT=${FHOUT_GFS} + export FHMAX_HF=${FHMAX_HF_GFS} + export FHOUT_HF=${FHOUT_HF_GFS} + export FHOUT_OCNICE=${FHOUT_OCNICE_GFS} + ;; + *gdas) + export FHMAX_HF=0 + export FHOUT_HF=0 + ;; + *) + echo "FATAL ERROR: Unsupported RUN '${RUN}'" + exit 1 +esac + +# Get task specific resources +source "${EXPDIR}/config.resources" fcst +export domains_stack_size="16000000" + + +if [[ "${DONST}" == "YES" ]]; then + source "${EXPDIR}/config.nsst" +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### + +export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" +#export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if (( gwd_opt == 1 )); then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if (( gwd_opt == 2 )); then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=1 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".false." + export do_ugwp_v1=".true." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".true." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".true." + export do_ugwp_v1_orog_only=".false." 
+ launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level + if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then + export cdmbgwd=${cdmbgwd_gsl} + fi +fi + +# Sponge layer settings +export tau=0. +export rf_cutoff=10. +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulance schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +if [[ $CCPP_SUITE = "FV3_GFS_v17_p8_ugwpv1_catchem" ]] || [[ $CCPP_SUITE = "FV3_GFS_v17_coupled_p8_ugwpv1_catchem" ]] ; then + export IAER=2011 ; #spectral band mapping method for aerosol optical properties from online chemical model +else + export IAER=1011 ; #spectral band mapping method for aerosol optical properties from MERRA-2 data +fi +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
+ +case ${imp_physics} in + 99) # ZhaoCarr + export ncld=1 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + ;; + 6) # WSM6 + export ncld=2 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export nwat=6 + ;; + 8) # Thompson + export ncld=2 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".true." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + ;; + 11) # GFDL + export ncld=5 + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + ;; + *) echo "Unknown microphysics option, ABORT!" ;; +esac + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="ufs.frac" +if [[ "${FRAC_GRID:-".true."}" == ".false." 
]]; then + export cplmode="ufs.nfrac" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- +if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" + + if [[ "${DOIAU}" == "YES" ]]; then + export restart_interval="3" + else + export restart_interval="6" + fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval=${restart_interval_gfs:-12} + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "${CASE}" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/test_catchem/config.fit2obs b/FV3GFSwfm/test_catchem/config.fit2obs new file mode 100644 index 0000000000..9b3fb87ead --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" fit2obs + +export PRVT=${FIXgfs}/gsi/prepobs_errtable.global +export HYBLEVS=${FIXgfs}/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/test_catchem/config.gempak b/FV3GFSwfm/test_catchem/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/test_catchem/config.genesis b/FV3GFSwfm/test_catchem/config.genesis new file mode 100644 index 0000000000..62a1bf88c0 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.genesis @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis ########## +echo "BEGIN: config.genesis" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.genesis" diff --git a/FV3GFSwfm/test_catchem/config.genesis_fsu b/FV3GFSwfm/test_catchem/config.genesis_fsu new file mode 100644 index 0000000000..13948592c4 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.genesis_fsu @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis_fsu ########## +echo "BEGIN: config.genesis_fsu" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis_fsu + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.genesis_fsu" diff --git a/FV3GFSwfm/test_catchem/config.ice b/FV3GFSwfm/test_catchem/config.ice new file mode 100644 index 0000000000..055bd1e2bb --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ice @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +# Override atm-only FV3 settings when ice model is on +export min_seaice="1.0e-6" +export use_cice_alb=".true." + +export MESH_ICE="mesh.mx${ICERES}.nc" + +export CICE_GRID="grid_cice_NEMS_mx${ICERES}.nc" +export CICE_MASK="kmtu_cice_NEMS_mx${ICERES}.nc" + +echo "END: config.ice" diff --git a/FV3GFSwfm/test_catchem/config.metp b/FV3GFSwfm/test_catchem/config.metp new file mode 100644 index 0000000000..8260d1c472 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.metp @@ -0,0 +1,100 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export model_dir=${ARCDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export 
g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/test_catchem/config.mos b/FV3GFSwfm/test_catchem/config.mos new file mode 100644 index 0000000000..a74c7e7d21 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.mos ########## +echo "BEGIN: config.mos" + +# MOS package location +export HOMEgfs_mos=/lfs/h1/ops/prod/packages/gfs_mos.v${mos_ver} + +echo "END: config.mos" diff --git a/FV3GFSwfm/test_catchem/config.mos_ext_grd_fcst b/FV3GFSwfm/test_catchem/config.mos_ext_grd_fcst new file mode 100644 index 0000000000..db94af945f --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_ext_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_fcst ########## +echo "BEGIN: config.mos_ext_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_fcst + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_fcst" diff --git a/FV3GFSwfm/test_catchem/config.mos_ext_grd_prdgen b/FV3GFSwfm/test_catchem/config.mos_ext_grd_prdgen new file mode 100644 index 0000000000..ade31b0c1a --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_ext_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prdgen ########## +echo "BEGIN: config.mos_ext_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prdgen" diff --git a/FV3GFSwfm/test_catchem/config.mos_ext_grd_prep b/FV3GFSwfm/test_catchem/config.mos_ext_grd_prep new file mode 100644 index 0000000000..0ba14e2573 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_ext_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prep ########## +echo "BEGIN: config.mos_ext_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prep" diff --git a/FV3GFSwfm/test_catchem/config.mos_ext_stn_fcst b/FV3GFSwfm/test_catchem/config.mos_ext_stn_fcst new file mode 100644 index 0000000000..5b26d196f9 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_ext_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_fcst ########## +echo "BEGIN: config.mos_ext_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_fcst" diff --git a/FV3GFSwfm/test_catchem/config.mos_ext_stn_prdgen b/FV3GFSwfm/test_catchem/config.mos_ext_stn_prdgen new file mode 100644 index 0000000000..9f63eb56fd --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_ext_stn_prdgen @@ -0,0 +1,12 @@ +#! 
/usr/bin/env bash + +########## config.mos_ext_stn_prdgen ########## +echo "BEGIN: config.mos_ext_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prdgen" diff --git a/FV3GFSwfm/test_catchem/config.mos_ext_stn_prep b/FV3GFSwfm/test_catchem/config.mos_ext_stn_prep new file mode 100644 index 0000000000..c443503f11 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_ext_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prep ########## +echo "BEGIN: config.mos_ext_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prep" diff --git a/FV3GFSwfm/test_catchem/config.mos_grd_fcst b/FV3GFSwfm/test_catchem/config.mos_grd_fcst new file mode 100644 index 0000000000..bd0d50a04d --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_fcst ########## +echo "BEGIN: config.mos_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_fcst" diff --git a/FV3GFSwfm/test_catchem/config.mos_grd_prdgen b/FV3GFSwfm/test_catchem/config.mos_grd_prdgen new file mode 100644 index 0000000000..dd9ce8bcd8 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prdgen ########## +echo "BEGIN: config.mos_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prdgen + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prdgen" diff --git a/FV3GFSwfm/test_catchem/config.mos_grd_prep b/FV3GFSwfm/test_catchem/config.mos_grd_prep new file mode 100644 index 0000000000..8a3d334d0d --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prep ########## +echo "BEGIN: config.mos_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prep" diff --git a/FV3GFSwfm/test_catchem/config.mos_stn_fcst b/FV3GFSwfm/test_catchem/config.mos_stn_fcst new file mode 100644 index 0000000000..7cb266ea3a --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_fcst ########## +echo "BEGIN: config.mos_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_fcst" diff --git a/FV3GFSwfm/test_catchem/config.mos_stn_prdgen b/FV3GFSwfm/test_catchem/config.mos_stn_prdgen new file mode 100644 index 0000000000..f92edbd0fd --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prdgen ########## +echo "BEGIN: config.mos_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prdgen" diff --git a/FV3GFSwfm/test_catchem/config.mos_stn_prep b/FV3GFSwfm/test_catchem/config.mos_stn_prep new file mode 100644 index 0000000000..b236f42879 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prep ########## +echo "BEGIN: config.mos_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prep + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prep" diff --git a/FV3GFSwfm/test_catchem/config.mos_wx_ext_prdgen b/FV3GFSwfm/test_catchem/config.mos_wx_ext_prdgen new file mode 100644 index 0000000000..054cb950ad --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_wx_ext_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_ext_prdgen ########## +echo "BEGIN: config.mos_wx_ext_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_ext_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_wx_ext_prdgen" diff --git a/FV3GFSwfm/test_catchem/config.mos_wx_prdgen b/FV3GFSwfm/test_catchem/config.mos_wx_prdgen new file mode 100644 index 0000000000..d4481b65fc --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.mos_wx_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_prdgen ########## +echo "BEGIN: config.mos_wx_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_wx_prdgen" diff --git a/FV3GFSwfm/test_catchem/config.npoess b/FV3GFSwfm/test_catchem/config.npoess new file mode 100644 index 0000000000..9a388d2e6b --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.npoess @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.npoess ########## +# GFS NPOESS step specific + +echo "BEGIN: config.npoess" + +# Get task specific resources +. "${EXPDIR}/config.resources" npoess + +echo "END: config.npoess" diff --git a/FV3GFSwfm/test_catchem/config.nsst b/FV3GFSwfm/test_catchem/config.nsst new file mode 100644 index 0000000000..7bda81f058 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.nsst @@ -0,0 +1,39 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# Set NST_MODEL for JEDIATMVAR or JEDIATMENS +if [[ "${DO_JEDIATMVAR}" == "YES" || "${DO_JEDIATMENS}" == "YES" ]]; then + export NST_MODEL=1 +fi + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +cdate="${PDY}${cyc}" +if (( cdate < 2017072000 )); then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if (( NST_GSI > 0 )); then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/test_catchem/config.oceanice_products b/FV3GFSwfm/test_catchem/config.oceanice_products new file mode 100644 index 0000000000..9e5c5b1c68 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.oceanice_products @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.oceanice_products ########## + +echo "BEGIN: config.oceanice_products" + +# Get task specific resources +source "${EXPDIR}/config.resources" oceanice_products + +export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products.yaml" + +# No. 
of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.oceanice_products" diff --git a/FV3GFSwfm/test_catchem/config.ocn b/FV3GFSwfm/test_catchem/config.ocn new file mode 100644 index 0000000000..317a76e58a --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocn @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +export MESH_OCN="mesh.mx${OCNRES}.nc" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi + +# Time interval for applying the increment +if [[ "${DOIAU}" == "YES" ]]; then + export ODA_INCUPD_NHOURS="6.0" +else + export ODA_INCUPD_NHOURS="3.0" +fi + + + +echo "END: config.ocn" diff --git a/FV3GFSwfm/test_catchem/config.ocnanal b/FV3GFSwfm/test_catchem/config.ocnanal new file mode 100644 index 0000000000..1294b429e9 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanal @@ -0,0 +1,26 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR="${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config" +export OBS_LIST=${PARMgfs}/gdas/soca/obs/obs_list.yaml +export OBS_YAML="${OBS_LIST}" +export FV3JEDI_STAGE_YAML="${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml" +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=100 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + 
+export COMIN_OBS=@COMIN_OBS@ + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/test_catchem/config.ocnanalbmat b/FV3GFSwfm/test_catchem/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/test_catchem/config.ocnanalchkpt b/FV3GFSwfm/test_catchem/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/test_catchem/config.ocnanalecen b/FV3GFSwfm/test_catchem/config.ocnanalecen new file mode 100644 index 0000000000..b64c2bcf62 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanalecen @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalecen ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalecen" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalecen + +echo "END: config.ocnanalecen" diff --git a/FV3GFSwfm/test_catchem/config.ocnanalpost b/FV3GFSwfm/test_catchem/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/test_catchem/config.ocnanalprep b/FV3GFSwfm/test_catchem/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/test_catchem/config.ocnanalrun b/FV3GFSwfm/test_catchem/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/test_catchem/config.ocnanalvrfy b/FV3GFSwfm/test_catchem/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/test_catchem/config.postsnd b/FV3GFSwfm/test_catchem/config.postsnd new file mode 100644 index 0000000000..7ec0ad6321 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.postsnd @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. 
$EXPDIR/config.resources postsnd + +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/test_catchem/config.prep b/FV3GFSwfm/test_catchem/config.prep new file mode 100644 index 0000000000..6009280db0 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.prep @@ -0,0 +1,61 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +export TROPCYQCRELOSH="${SCRgfs}/exglobal_atmos_tropcy_qc_reloc.sh" + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=${FIXgfs}/gsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/test_catchem/config.prepatmiodaobs b/FV3GFSwfm/test_catchem/config.prepatmiodaobs new file mode 100644 index 0000000000..e29cf67b07 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.prepatmiodaobs @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.prepatmiodaobs ########## +# Atm Obs Prep specific + +echo "BEGIN: config.prepatmiodaobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" prepatmiodaobs + +echo "END: config.prepatmiodaobs" diff --git a/FV3GFSwfm/test_catchem/config.prepchem b/FV3GFSwfm/test_catchem/config.prepchem new file mode 100755 index 0000000000..0d8c54cf61 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.prepchem @@ -0,0 +1,23 @@ +#!/bin/ksh -x + +########## config.prepchem ########## +# PREPBUFR specific configuration + +echo "BEGIN: config.prepchem" + +# Get task specific resources +. 
$EXPDIR/config.resources prepchem + + +# Set prepchem variables + +# Set job and DATAROOT + +export job=${CDUMP}_prep_${cyc} +export EMIDIR="/scratch1/BMC/gsd-fv3-dev/lzhang/emi_" +#export EMIINPUT=/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop/emi_C" +#JKHexport EMIINPUT="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/scratch/tmp/emi_${CASE}" +#JKHexport PUBEMI="/scratch2/BMC/public/data/grids/nesdis/GBBEPx/${CASE}" +#JKHexport BINGB="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/scratch/tmp/GBBEPx" +#JKHexport NCGB="${EMIINPUT}/GBBEPx" +echo "END: config.prepchem" diff --git a/FV3GFSwfm/test_catchem/config.prepoceanobs b/FV3GFSwfm/test_catchem/config.prepoceanobs new file mode 100644 index 0000000000..977097acaf --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.prepoceanobs @@ -0,0 +1,20 @@ +#!/bin/bash + +########## config.prepoceanobs ########## + +echo "BEGIN: config.prepoceanobs" + +export OCNOBS2IODAEXEC=${HOMEgfs}/sorc/gdas.cd/build/bin/gdas_obsprovider2ioda.x + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBSPREP_YAML=${PARMgfs}/gdas/soca/obsprep/obsprep_config.yaml +export OBS_LIST=${PARMgfs}/gdas/soca/obs/obs_list.yaml +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} + +# ocean analysis needs own dmpdir until standard dmpdir has full ocean obs +export DMPDIR=/scratch1/NCEPDEV/global/glopara/data/experimental_obs + +# Get task specific resources +. "${EXPDIR}/config.resources" prepoceanobs +echo "END: config.prepoceanobs" diff --git a/FV3GFSwfm/test_catchem/config.prepsnowobs b/FV3GFSwfm/test_catchem/config.prepsnowobs new file mode 100644 index 0000000000..60ca16ce9e --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.prepsnowobs @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.prepsnowobs ########## +# Snow Obs Prep specific + +echo "BEGIN: config.prepsnowobs" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" prepsnowobs + +export GTS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_gts.yaml.j2" +export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" + +export BUFR2IODAX="${EXECgfs}/bufr2ioda.x" + +export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe" +export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" + +export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py" + +echo "END: config.prepsnowobs" diff --git a/FV3GFSwfm/test_catchem/config.resources b/FV3GFSwfm/test_catchem/config.resources new file mode 100644 index 0000000000..c405505498 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.resources @@ -0,0 +1,1213 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if (( $# != 1 )); then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "stage_ic aerosol_init" + echo "prep prepsnowobs prepatmiodaobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "snowanl" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst echgres" + echo "upp atmos_products" + echo "tracker genesis genesis_fsu" + echo "verfozn verfrad vminmon fit2obs metp arch cleanup" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic oceanice_products" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak npoess" + echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalecen ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +case ${machine} in + "WCOSS2") npe_node_max=128;; + "HERA") npe_node_max=40;; + "ORION") npe_node_max=40;; + "HERCULES") npe_node_max=80;; + "JET") + case ${PARTITION_BATCH} in + "xjet") 
npe_node_max=24;; + "vjet" | "sjet") npe_node_max=16;; + "kjet") npe_node_max=40;; + *) + echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}" + exit 3 + esac + ;; + "S4") + case ${PARTITION_BATCH} in + "s4") npe_node_max=32;; + "ivy") npe_node_max=20;; + *) + echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}" + exit 3 + esac + ;; + "AWSPW") + export PARTITION_BATCH="compute" + npe_node_max=40 + ;; + "CONTAINER") + npe_node_max=1 + ;; + *) + echo "FATAL ERROR: Unknown machine encountered by ${BASH_SOURCE[0]}" + exit 2 + ;; +esac +export npe_node_max + +case ${step} in + "prep") + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" == "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40GB" + fi + ;; + + "prepsnowobs") + export wtime_prepsnowobs="00:05:00" + export npe_prepsnowobs=1 + export nth_prepsnowobs=1 + export npe_node_prepsnowobs=1 + ;; + + "prepatmiodaobs") + export wtime_prepatmiodaobs="00:30:00" + export npe_prepatmiodaobs=1 + export nth_prepatmiodaobs=1 + export npe_node_prepatmiodaobs=$(( npe_node_max / nth_prepatmiodaobs )) + ;; + + "aerosol_init") + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + export npe_node_aerosol_init=$(( npe_node_max / nth_aerosol_init )) + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6GB" + ;; + + "prepchem") + export wtime_prepchem="00:25:00" + export npe_prepchem=2 + export nth_prepchem=1 + export npe_node_prepchem=$(( npe_node_max / nth_prepchem )) + export NTASKS=${npe_prepchem} + ;; + + "waveinit") + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + export npe_node_waveinit=$(( npe_node_max / nth_waveinit )) + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + ;; + + "waveprep") + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + 
export nth_waveprep=1 + export nth_waveprep_gfs=1 + export npe_node_waveprep=$(( npe_node_max / nth_waveprep )) + export npe_node_waveprep_gfs=$(( npe_node_max / nth_waveprep_gfs )) + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + ;; + + "wavepostsbs") + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$(( npe_node_max / nth_wavepostsbs )) + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + ;; + + "wavepostbndpnt") + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt )) + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + ;; + + "wavepostbndpntbll") + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll )) + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + ;; + + "wavepostpnt") + export wtime_wavepostpnt="04:00:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt )) + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + ;; + + "wavegempak") + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$(( npe_node_max / nth_wavegempak )) + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + ;; + + "waveawipsbulls") + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(( npe_node_max / nth_waveawipsbulls )) + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + ;; + + 
"waveawipsgridded") + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(( npe_node_max / nth_waveawipsgridded )) + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + ;; + + "atmanlinit") + export layout_x=${layout_x_atmanl} + export layout_y=${layout_y_atmanl} + + export layout_gsib_x=$(( layout_x * 3 )) + export layout_gsib_y=$(( layout_y * 2 )) + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + export npe_node_atmanlinit=$(( npe_node_max / nth_atmanlinit )) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + ;; + + "atmanlrun") + export layout_x=${layout_x_atmanl} + export layout_y=${layout_y_atmanl} + + export wtime_atmanlrun="00:30:00" + export npe_atmanlrun=$(( layout_x * layout_y * 6 )) + export npe_atmanlrun_gfs=$(( layout_x * layout_y * 6 )) + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + export npe_node_atmanlrun=$(( npe_node_max / nth_atmanlrun )) + export memory_atmanlrun="96GB" + export is_exclusive=True + ;; + + "atmanlfinal") + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + export npe_node_atmanlfinal=$(( npe_node_max / nth_atmanlfinal )) + export is_exclusive=True + ;; + + "snowanl") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") + layout_x=6 + layout_y=6 + ;; + "C384") + layout_x=5 + layout_y=5 + ;; + "C192" | "C96" | "C48") + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + export wtime_snowanl="00:15:00" + export npe_snowanl=$(( layout_x * layout_y * 6 )) + export nth_snowanl=1 + export npe_node_snowanl=$(( npe_node_max / nth_snowanl )) + ;; + + "aeroanlinit") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") + layout_x=8 + 
layout_y=8 + ;; + "C384") + layout_x=8 + layout_y=8 + ;; + "C192" | "C96") + layout_x=8 + layout_y=8 + ;; + "C48" ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + export npe_node_aeroanlinit=$(( npe_node_max / nth_aeroanlinit )) + export memory_aeroanlinit="3072M" + ;; + + "aeroanlrun") + case ${CASE} in + "C768") + layout_x=8 + layout_y=8 + ;; + "C384") + layout_x=8 + layout_y=8 + ;; + "C192" | "C96") + layout_x=8 + layout_y=8 + ;; + "C48" ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + export npe_aeroanlrun=$(( layout_x * layout_y * 6 )) + export npe_aeroanlrun_gfs=$(( layout_x * layout_y * 6 )) + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + export npe_node_aeroanlrun=$(( npe_node_max / nth_aeroanlrun )) + export is_exclusive=True + ;; + + "aeroanlfinal") + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + export npe_node_aeroanlfinal=$(( npe_node_max / nth_aeroanlfinal )) + export memory_aeroanlfinal="3072M" + ;; + + "ocnanalprep") + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + export npe_node_ocnanalprep=$(( npe_node_max / nth_ocnanalprep )) + export memory_ocnanalprep="24GB" + ;; + + "prepoceanobs") + export wtime_prepoceanobs="00:10:00" + export npe_prepoceanobs=1 + export nth_prepoceanobs=1 + export npe_node_prepoceanobs=$(( npe_node_max / nth_prepoceanobs )) + export memory_prepoceanobs="48GB" + ;; + + "ocnanalbmat") + npes=16 + case ${OCNRES} in + "025") npes=480;; + "050") npes=16;; + "500") npes=16;; + *) + 
echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + export npe_node_ocnanalbmat=$(( npe_node_max / nth_ocnanalbmat )) + ;; + + "ocnanalrun") + npes=16 + case ${OCNRES} in + "025") + npes=480 + memory_ocnanalrun="96GB" + ;; + "050") + npes=16 + memory_ocnanalrun="96GB" + ;; + "500") + npes=16 + memory_ocnanalrun="24GB" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + + export wtime_ocnanalrun="00:15:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + export npe_node_ocnanalrun=$(( npe_node_max / nth_ocnanalrun )) + export memory_ocnanalrun + ;; + + "ocnanalecen") + npes=16 + case ${OCNRES} in + "025") + npes=40 + memory_ocnanalecen="96GB" + ;; + "050") + npes=16 + memory_ocnanalecen="96GB" + ;; + "500") + npes=16 + memory_ocnanalecen="24GB" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + + export wtime_ocnanalecen="00:10:00" + export npe_ocnanalecen=${npes} + export nth_ocnanalecen=1 + export is_exclusive=True + export npe_node_ocnanalecen=$(( npe_node_max / nth_ocnanalecen )) + export memory_ocnanalecen + ;; + + "ocnanalchkpt") + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + export npe_node_ocnanalchkpt=$(( npe_node_max / nth_ocnanalchkpt )) + case ${OCNRES} in + "025") + memory_ocnanalchkpt="128GB" + npes=40;; + "050") + memory_ocnanalchkpt="32GB" + npes=16;; + "500") + memory_ocnanalchkpt="32GB" + npes=8;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${OCNRES}" + exit 4 + esac + export npe_ocnanalchkpt=${npes} + export memory_ocnanalchkpt + ;; + + "ocnanalpost") + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export 
nth_ocnanalpost=1 + export npe_node_ocnanalpost=$(( npe_node_max / nth_ocnanalpost )) + ;; + + "ocnanalvrfy") + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + export npe_node_ocnanalvrfy=$(( npe_node_max / nth_ocnanalvrfy )) + export memory_ocnanalvrfy="24GB" + ;; + + "anal") + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" == "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + case ${CASE} in + "C384") + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + ;; + "C192" | "C96" | "C48") + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} == "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} == "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} == "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_node_anal=$(( npe_node_max / nth_anal )) + export nth_cycle=${nth_anal} + export npe_node_cycle=$(( npe_node_max / nth_cycle )) + export is_exclusive=True + ;; + + "analcalc") + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$(( npe_node_max / nth_analcalc )) + export is_exclusive=True + 
export memory_analcalc="48GB" + ;; + + "analdiag") + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$(( npe_node_max / nth_analdiag )) + export memory_analdiag="48GB" + ;; + + "sfcanl") + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + export npe_node_sfcanl=$(( npe_node_max / nth_sfcanl )) + export is_exclusive=True + ;; + + "fcst" | "efcs") + export is_exclusive=True + + if [[ "${step}" == "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" == "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" == ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. 
+ # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${FV3PETS}} + (( "${MEDPETS}" > 300 )) && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + CHMPETS=0; CHMTHREADS=0 + if [[ "${DO_AERO}" == "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + export CHMPETS CHMTHREADS + + CHMPETS=0; CHMTHREADS=0 + if [[ "${DO_CATChem}" == "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + echo "CATChem using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + export CHMPETS CHMTHREADS + + WAVPETS=0; WAVTHREADS=0 + if [[ "${DO_WAVE}" == "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + export WAVPETS WAVTHREADS + + OCNPETS=0; OCNTHREADS=0 + if [[ "${DO_OCN}" == "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + export OCNPETS OCNTHREADS + + ICEPETS=0; ICETHREADS=0 + if [[ "${DO_ICE}" == "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + 
ICEPETS )) + fi + export ICEPETS ICETHREADS + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:15:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + "C768" | "C1152") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + ;; + + "oceanice_products") + export wtime_oceanice_products="00:15:00" + export npe_oceanice_products=1 + export npe_node_oceanice_products=1 + export nth_oceanice_products=1 + export memory_oceanice_products="96GB" + ;; + + "upp") + case "${CASE}" in + "C48" | "C96") + export npe_upp=${CASE:1} + ;; + "C192" | "C384" | "C768") + export npe_upp=120 + export memory_upp="48GB" + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_node_upp=${npe_upp} + + export nth_upp=1 + + export wtime_upp="00:15:00" + if (( npe_node_upp > npe_node_max )); then + export npe_node_upp=${npe_node_max} + fi + export is_exclusive=True + ;; + + "atmos_products") + export wtime_atmos_products="00:15:00" + export npe_atmos_products=24 + export nth_atmos_products=1 + export npe_node_atmos_products="${npe_atmos_products}" + export wtime_atmos_products_gfs="${wtime_atmos_products}" + export 
npe_atmos_products_gfs="${npe_atmos_products}" + export nth_atmos_products_gfs="${nth_atmos_products}" + export npe_node_atmos_products_gfs="${npe_node_atmos_products}" + export is_exclusive=True + ;; + + "verfozn") + export wtime_verfozn="00:05:00" + export npe_verfozn=1 + export nth_verfozn=1 + export npe_node_verfozn=1 + export memory_verfozn="1G" + ;; + + "verfrad") + export wtime_verfrad="00:40:00" + export npe_verfrad=1 + export nth_verfrad=1 + export npe_node_verfrad=1 + export memory_verfrad="5G" + ;; + + "vminmon") + export wtime_vminmon="00:05:00" + export npe_vminmon=1 + export nth_vminmon=1 + export npe_node_vminmon=1 + export wtime_vminmon_gfs="00:05:00" + export npe_vminmon_gfs=1 + export nth_vminmon_gfs=1 + export npe_node_vminmon_gfs=1 + export memory_vminmon="1G" + ;; + + "tracker") + export wtime_tracker="00:10:00" + export npe_tracker=1 + export nth_tracker=1 + export npe_node_tracker=1 + export memory_tracker="4G" + ;; + + "genesis") + export wtime_genesis="00:25:00" + export npe_genesis=1 + export nth_genesis=1 + export npe_node_genesis=1 + export memory_genesis="10G" + ;; + + "genesis_fsu") + export wtime_genesis_fsu="00:10:00" + export npe_genesis_fsu=1 + export nth_genesis_fsu=1 + export npe_node_genesis_fsu=1 + export memory_genesis_fsu="10G" + ;; + + "fit2obs") + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + ;; + + "metp") + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + ;; + + "echgres") + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" == "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + ;; + + "init") + export 
wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70GB" + ;; + + "init_chem") + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + ;; + + "mom6ic") + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + ;; + + "arch" | "earc" | "getic") + declare -x "wtime_${step}"="06:00:00" + declare -x "npe_${step}"="1" + declare -x "npe_node_${step}"="1" + declare -x "nth_${step}"="1" + declare -x "memory_${step}"="4096M" + if [[ "${machine}" == "WCOSS2" ]]; then + declare -x "memory_${step}"="50GB" + fi + ;; + + "cleanup") + export wtime_cleanup="00:15:00" + export npe_cleanup=1 + export npe_node_cleanup=1 + export nth_cleanup=1 + export memory_cleanup="4096M" + ;; + + "stage_ic") + export wtime_stage_ic="00:15:00" + export npe_stage_ic=1 + export npe_node_stage_ic=1 + export nth_stage_ic=1 + export is_exclusive=True + ;; + + "atmensanlinit") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + export npe_node_atmensanlinit=$(( npe_node_max / nth_atmensanlinit )) + export memory_atmensanlinit="3072M" + ;; + + "atmensanlrun") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + export wtime_atmensanlrun="00:30:00" + export npe_atmensanlrun=$(( layout_x * layout_y * 6 )) + export npe_atmensanlrun_gfs=$(( layout_x * layout_y * 6 )) + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + export npe_node_atmensanlrun=$(( npe_node_max / nth_atmensanlrun )) + export memory_atmensanlrun="96GB" + export is_exclusive=True + ;; + + "atmensanlfinal") + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + export npe_node_atmensanlfinal=$(( npe_node_max / 
nth_atmensanlfinal )) + export is_exclusive=True + ;; + + "eobs" | "eomg") + export wtime_eobs="00:15:00" + export wtime_eomg="00:30:00" + case ${CASE} in + "C768") export npe_eobs=200;; + "C384") export npe_eobs=100;; + "C192" | "C96" | "C48") export npe_eobs=40;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + export npe_node_eobs=$(( npe_node_max / nth_eobs )) + export is_exclusive=True + # The number of tasks and cores used must be the same for eobs + # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details + # For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + elif [[ ${machine} = "HERCULES" ]]; then + # For Hercules, this is only an issue at C384; use 20 tasks/node + if [[ ${CASE} = "C384" ]]; then + export npe_node_eobs=20 + fi + fi + export npe_node_eomg=${npe_node_eobs} + ;; + + "ediag") + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$(( npe_node_max / nth_ediag )) + export memory_ediag="30GB" + ;; + + "eupd") + export wtime_eupd="00:30:00" + case ${CASE} in + "C768") + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" == "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + ;; + "C384") + export npe_eupd=270 + export nth_eupd=8 + if [[ "${machine}" == "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ ${machine} == "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + ;; + "C192" | "C96" | "C48") + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" == "HERA" || "${machine}" == "JET" ]]; then + export nth_eupd=4 + fi + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}" + exit 4 + ;; + esac + export npe_node_eupd=$(( npe_node_max / nth_eupd )) + export is_exclusive=True + ;; + + "ecen") 
+ export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" == "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} == "C384" || ${CASE} == "C192" || ${CASE} == "C96" || ${CASE} == "C48" ]]; then + export nth_ecen=2 + fi + export npe_node_ecen=$(( npe_node_max / nth_ecen )) + export nth_cycle=${nth_ecen} + export npe_node_cycle=$(( npe_node_max / nth_cycle )) + export is_exclusive=True + ;; + + "esfc") + export wtime_esfc="00:08:00" + export npe_esfc=80 + export nth_esfc=1 + export npe_node_esfc=$(( npe_node_max / nth_esfc )) + export nth_cycle=${nth_esfc} + export npe_node_cycle=$(( npe_node_max / nth_cycle )) + export memory_esfc="80GB" + ;; + + "epos") + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=1 + export npe_node_epos=$(( npe_node_max / nth_epos )) + export is_exclusive=True + ;; + + "postsnd") + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(( npe_node_postsnd * nth_postsnd )) + if (( postsnd_req_cores > npe_node_max )); then + export npe_node_postsnd=$(( npe_node_max / nth_postsnd )) + fi + export is_exclusive=True + ;; + + "awips") + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + ;; + + "npoess") + export wtime_npoess="03:30:00" + export npe_npoess=1 + export npe_node_npoess=1 + export nth_npoess=1 + export memory_npoess="3GB" + ;; + + "gempak") + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + ;; + + "mos_stn_prep") + export wtime_mos_stn_prep="00:10:00" + export npe_mos_stn_prep=3 + export npe_node_mos_stn_prep=3 + export nth_mos_stn_prep=1 + export memory_mos_stn_prep="5GB" + export 
NTASK="${npe_mos_stn_prep}" + export PTILE="${npe_node_mos_stn_prep}" + ;; + + "mos_grd_prep") + export wtime_mos_grd_prep="00:10:00" + export npe_mos_grd_prep=4 + export npe_node_mos_grd_prep=4 + export nth_mos_grd_prep=1 + export memory_mos_grd_prep="16GB" + export NTASK="${npe_mos_grd_prep}" + export PTILE="${npe_node_mos_grd_prep}" + ;; + + "mos_ext_stn_prep") + export wtime_mos_ext_stn_prep="00:15:00" + export npe_mos_ext_stn_prep=2 + export npe_node_mos_ext_stn_prep=2 + export nth_mos_ext_stn_prep=1 + export memory_mos_ext_stn_prep="5GB" + export NTASK="${npe_mos_ext_stn_prep}" + export PTILE="${npe_node_mos_ext_stn_prep}" + ;; + + "mos_ext_grd_prep") + export wtime_mos_ext_grd_prep="00:10:00" + export npe_mos_ext_grd_prep=7 + export npe_node_mos_ext_grd_prep=7 + export nth_mos_ext_grd_prep=1 + export memory_mos_ext_grd_prep="3GB" + export NTASK="${npe_mos_ext_grd_prep}" + export PTILE="${npe_node_mos_ext_grd_prep}" + ;; + + "mos_stn_fcst") + export wtime_mos_stn_fcst="00:10:00" + export npe_mos_stn_fcst=5 + export npe_node_mos_stn_fcst=5 + export nth_mos_stn_fcst=1 + export memory_mos_stn_fcst="40GB" + export NTASK="${npe_mos_stn_fcst}" + export PTILE="${npe_node_mos_stn_fcst}" + ;; + + "mos_grd_fcst") + export wtime_mos_grd_fcst="00:10:00" + export npe_mos_grd_fcst=7 + export npe_node_mos_grd_fcst=7 + export nth_mos_grd_fcst=1 + export memory_mos_grd_fcst="50GB" + export NTASK="${npe_mos_grd_fcst}" + export PTILE="${npe_node_mos_grd_fcst}" + ;; + + "mos_ext_stn_fcst") + export wtime_mos_ext_stn_fcst="00:20:00" + export npe_mos_ext_stn_fcst=3 + export npe_node_mos_ext_stn_fcst=3 + export nth_mos_ext_stn_fcst=1 + export memory_mos_ext_stn_fcst="50GB" + export NTASK="${npe_mos_ext_stn_fcst}" + export PTILE="${npe_node_mos_ext_stn_fcst}" + export prepost=True + ;; + + "mos_ext_grd_fcst") + export wtime_mos_ext_grd_fcst="00:10:00" + export npe_mos_ext_grd_fcst=7 + export npe_node_mos_ext_grd_fcst=7 + export nth_mos_ext_grd_fcst=1 + export 
memory_mos_ext_grd_fcst="50GB" + export NTASK="${npe_mos_ext_grd_fcst}" + export PTILE="${npe_node_mos_ext_grd_fcst}" + ;; + + "mos_stn_prdgen") + export wtime_mos_stn_prdgen="00:10:00" + export npe_mos_stn_prdgen=1 + export npe_node_mos_stn_prdgen=1 + export nth_mos_stn_prdgen=1 + export memory_mos_stn_prdgen="15GB" + export NTASK="${npe_mos_stn_prdgen}" + export PTILE="${npe_node_mos_stn_prdgen}" + export prepost=True + ;; + + "mos_grd_prdgen") + export wtime_mos_grd_prdgen="00:40:00" + export npe_mos_grd_prdgen=72 + export npe_node_mos_grd_prdgen=18 + export nth_mos_grd_prdgen=4 + export memory_mos_grd_prdgen="20GB" + export NTASK="${npe_mos_grd_prdgen}" + export PTILE="${npe_node_mos_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_grd_prdgen}" + ;; + + "mos_ext_stn_prdgen") + export wtime_mos_ext_stn_prdgen="00:10:00" + export npe_mos_ext_stn_prdgen=1 + export npe_node_mos_ext_stn_prdgen=1 + export nth_mos_ext_stn_prdgen=1 + export memory_mos_ext_stn_prdgen="15GB" + export NTASK="${npe_mos_ext_stn_prdgen}" + export PTILE="${npe_node_mos_ext_stn_prdgen}" + export prepost=True + ;; + + "mos_ext_grd_prdgen") + export wtime_mos_ext_grd_prdgen="00:30:00" + export npe_mos_ext_grd_prdgen=96 + export npe_node_mos_ext_grd_prdgen=6 + export nth_mos_ext_grd_prdgen=16 + export memory_mos_ext_grd_prdgen="30GB" + export NTASK="${npe_mos_ext_grd_prdgen}" + export PTILE="${npe_node_mos_ext_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_ext_grd_prdgen}" + ;; + + "mos_wx_prdgen") + export wtime_mos_wx_prdgen="00:10:00" + export npe_mos_wx_prdgen=4 + export npe_node_mos_wx_prdgen=2 + export nth_mos_wx_prdgen=2 + export memory_mos_wx_prdgen="10GB" + export NTASK="${npe_mos_wx_prdgen}" + export PTILE="${npe_node_mos_wx_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_prdgen}" + ;; + + "mos_wx_ext_prdgen") + export wtime_mos_wx_ext_prdgen="00:10:00" + export npe_mos_wx_ext_prdgen=4 + export npe_node_mos_wx_ext_prdgen=2 + export nth_mos_wx_ext_prdgen=2 + export 
memory_mos_wx_ext_prdgen="10GB" + export NTASK="${npe_mos_wx_ext_prdgen}" + export PTILE="${npe_node_mos_wx_ext_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_ext_prdgen}" + ;; + + *) + echo "FATAL ERROR: Invalid job ${step} passed to ${BASH_SOURCE[0]}" + exit 1 + ;; + +esac + +echo "END: config.resources" diff --git a/FV3GFSwfm/test_catchem/config.sfcanl b/FV3GFSwfm/test_catchem/config.sfcanl new file mode 100644 index 0000000000..e2fde8992a --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.sfcanl @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +# Turn off NST in JEDIATMVAR +if [[ "${DO_JEDIATMVAR}" == "YES" ]]; then + export DONST="NO" +fi + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/test_catchem/config.snowanl b/FV3GFSwfm/test_catchem/config.snowanl new file mode 100644 index 0000000000..980036cf7b --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.snowanl @@ -0,0 +1,30 @@ +#! /usr/bin/env bash + +########## config.snowanl ########## +# configuration common to snow analysis tasks + +echo "BEGIN: config.snowanl" + +# Get task specific resources +source "${EXPDIR}/config.resources" snowanl + +export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" + +# Name of the JEDI executable and its yaml template +export JEDIEXE="${EXECgfs}/fv3jedi_letkf.x" +export JEDIYAML="${PARMgfs}/gdas/snow/letkfoi/letkfoi.yaml.j2" + +# Ensemble member properties +export SNOWDEPTHVAR="snodl" +export BESTDDEV="30." # Background Error Std. Dev. 
for LETKFOI + +# Name of the executable that applies increment to bkg and its namelist template +export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" +export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" + +export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" + +export io_layout_x=1 +export io_layout_y=1 + +echo "END: config.snowanl" diff --git a/FV3GFSwfm/test_catchem/config.stage_ic b/FV3GFSwfm/test_catchem/config.stage_ic new file mode 100644 index 0000000000..63d0e4a5cf --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.stage_ic @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +########## config.stage_ic ########## + +echo "BEGIN: config.stage_ic" + +# Get task specific resources +source "${EXPDIR}/config.resources" stage_ic + +case "${CASE}" in + "C48" | "C96" | "C192") + export CPL_ATMIC="workflow_${CASE}_refactored" + export CPL_ICEIC="workflow_${CASE}_refactored" + export CPL_OCNIC="workflow_${CASE}_refactored" + export CPL_WAVIC="workflow_${CASE}_refactored" + ;; + "C384") + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c_refactored + export CPL_ICEIC=CPC_refactored + export CPL_OCNIC=CPC3Dvar_refactored + export CPL_WAVIC=workflow_C384_refactored + ;; + "C768") + export CPL_ATMIC=HR3C768 + export CPL_ICEIC=HR3marine + export CPL_OCNIC=HR3marine + export CPL_WAVIC=HR3marine + ;; + "C1152") + export CPL_ATMIC=HR3C1152 + export CPL_ICEIC=HR3marine + export CPL_OCNIC=HR3marine + export CPL_WAVIC=HR3marine + ;; + *) + echo "FATAL ERROR Unrecognized resolution: ${CASE}" + exit 1 + ;; +esac + +echo "END: config.stage_ic" diff --git a/FV3GFSwfm/test_catchem/config.tracker b/FV3GFSwfm/test_catchem/config.tracker new file mode 100644 index 0000000000..71fcf9196d --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.tracker @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.tracker ########## +echo "BEGIN: config.tracker" + +# Get task specific resources +. "${EXPDIR}/config.resources" tracker + +# Get tropcy settings +. 
"${EXPDIR}/config.tropcy" + +echo "END: config.tracker" diff --git a/FV3GFSwfm/test_catchem/config.tropcy b/FV3GFSwfm/test_catchem/config.tropcy new file mode 100644 index 0000000000..718abe3be5 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.tropcy @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.tropcy ########## +echo "BEGIN: config.tropcy" + +# Tracker/genesis package location +export HOMEens_tracker=${BASE_GIT}/TC_tracker/${ens_tracker_ver} + +export SENDCOM="YES" # Needed by tracker scripts still + +export FHOUT_CYCLONE=6 +FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) +export FHMAX_CYCLONE + +echo "END: config.tropcy" diff --git a/FV3GFSwfm/test_catchem/config.ufs b/FV3GFSwfm/test_catchem/config.ufs new file mode 100644 index 0000000000..635868dcf8 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.ufs @@ -0,0 +1,512 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if (( $# <= 1 )); then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" + echo "--gocart" + echo "--catchem" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_gocart=true +skip_catchem=true +skip_mediator=true + +# Loop through named arguments +while (( $# > 0 )); do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + shift + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + shift + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + shift + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + shift + ;; + "--gocart") + skip_gocart=false + shift + ;; + "--catchem") + skip_catchem=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, 
ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=2 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 
#Note this should be 10 for WCOSS2 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
+ exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplchp=".false." +export cplwav=".false." +export cplwav2atm=".false." +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1" +model_list="atm" + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export cpl=".true." + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8? +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + source "${EXPDIR}/config.ocn" + export cplflx=".true." 
+ model_list="${model_list}.ocean" + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.1deg.nc" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + eps_imesh="2.5e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' + ;; + "050") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + 
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export TOPOEDITS + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING + export eps_imesh + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + source "${EXPDIR}/config.ice" + export cplice=".true." + model_list="${model_list}.ice" + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + source "${EXPDIR}/config.wave" + export cplwav=".true." + export cplwav2atm=".true." 
+ model_list="${model_list}.wave" + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "glo_200") + ntasks_ww3=30 + nthreads_ww3=1 + ;; + "glo_500") + ntasks_ww3=12 + nthreads_ww3=1 + ;; + "mx025") + ntasks_ww3=80 + ;; + "uglo_100km") + ntasks_ww3=40 + nthreads_ww3=1 + ;; + "uglo_m1g16") + ntasks_ww3=1000 + nthreads_ww3=1 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +# GOCART specific settings +if [[ "${skip_gocart}" == "false" ]]; then + source "${EXPDIR}/config.aero" + export cplchm=".true." + model_list="${model_list}.aero" +fi + +# CATChem specific settings +if [[ "${skip_catchem}" == "false" ]]; then + source "${EXPDIR}/config.catchem" + export cplchp=".true." + export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_catchem" + export GBDAY=1 +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1_catchem" + export GBDAY=35 +fi + + model_list="${model_list}.catchem" +fi + + +# Set the name of the UFS (previously nems) configure template to use +# Default ufs.configure templates for supported model configurations +case "${model_list}" in + atm) + default_template="${PARMgfs}/ufs/ufs.configure.atm.IN" + ;; + atm.aero) + default_template="${PARMgfs}/ufs/ufs.configure.atmaero.IN" + ;; + atm.catchem) + default_template="${HOMEgfs}/parm/ufs/ufs.configure.atmcatchem.IN" + ;; + atm.wave) + default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav.IN" + ;; + atm.ocean.ice) + default_template="${PARMgfs}/ufs/ufs.configure.s2s_esmf.IN" + ;; + atm.ocean.ice.aero) + default_template="${PARMgfs}/ufs/ufs.configure.s2sa_esmf.IN" + ;; + atm.ocean.ice.catchem) + default_template="${HOMEgfs}/parm/ufs/ufs.configure.s2scatchem_esmf.IN" + ;; + atm.ocean.ice.wave) + 
default_template="${PARMgfs}/ufs/ufs.configure.s2sw_esmf.IN" + ;; + atm.ocean.ice.wave.aero) + default_template="${PARMgfs}/ufs/ufs.configure.s2swa_esmf.IN" + ;; + atm.ocean.ice.wave.catchem) + default_template="${HOMEgfs}/parm/ufs/ufs.configure.s2swcatchem_esmf.IN" + ;; + *) + echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" + exit 16 + ;; +esac + +# Allow user to override the default template +export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}} +unset model_list default_template + +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." + exit 17 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/test_catchem/config.upp b/FV3GFSwfm/test_catchem/config.upp new file mode 100644 index 0000000000..41015c2fee --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.upp @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.upp ########## +# UPP specific + +echo "BEGIN: config.upp" + +# Get task specific resources +. "${EXPDIR}/config.resources" upp + +export UPP_CONFIG="${PARMgfs}/post/upp.yaml" + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.upp" diff --git a/FV3GFSwfm/test_catchem/config.verfozn b/FV3GFSwfm/test_catchem/config.verfozn new file mode 100644 index 0000000000..df7d18012d --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.verfozn @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.verfozn ########## +echo "BEGIN: config.verfozn" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfozn + +export DO_DATA_RPT=1 +export OZN_AREA="glb" +export OZNMON_SUFFIX=${NET} +export SATYPE_FILE=${PARMgfs}/monitor/gdas_oznmon_satype.txt + +# Source the parm file +. 
"${PARMgfs}/monitor/gdas_oznmon.parm" + +# Set up validation file +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + export ozn_val_file=${PARMgfs}/monitor/gdas_oznmon_base.tar +fi + +echo "END: config.verfozn" diff --git a/FV3GFSwfm/test_catchem/config.verfrad b/FV3GFSwfm/test_catchem/config.verfrad new file mode 100644 index 0000000000..506ce50b4f --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.verfrad @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +########## config.verfrad ########## +echo "BEGIN: config.verfrad" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfrad + +export satype_file=${PARMgfs}/monitor/gdas_radmon_satype.txt + +# Source the parm file +. "${PARMgfs}/monitor/da_mon.parm" + +# Other variables +export RAD_AREA="glb" +export MAKE_CTL=1 +export MAKE_DATA=1 +export USE_ANL=1 +export DO_DIAG_RPT=1 +export DO_DATA_RPT=1 + +export RADMON_SUFFIX=${RUN} +export CYCLE_INTERVAL=${assim_freq:-6} +export VERBOSE="YES" + +echo "END: config.verfrad" diff --git a/FV3GFSwfm/test_catchem/config.vminmon b/FV3GFSwfm/test_catchem/config.vminmon new file mode 100644 index 0000000000..7c7d362161 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.vminmon @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.vminmon ########## +echo "BEGIN: config.vminmon" + +# Get task specific resources +. "${EXPDIR}/config.resources" vminmon + +export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} +export CYCLE_INTERVAL=${assim_freq:-6} + +export mm_gnormfile=${PARMgfs}/monitor/${RUN}_minmon_gnorm.txt +export mm_costfile=${PARMgfs}/monitor/${RUN}_minmon_cost.txt + +echo "END: config.vminmon" diff --git a/FV3GFSwfm/test_catchem/config.wave b/FV3GFSwfm/test_catchem/config.wave new file mode 100644 index 0000000000..6fbce69996 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.wave @@ -0,0 +1,207 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' 
+ export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD='' + export wavepostGRD='gwes_30m' + export waveuoutpGRD=${waveGRD} + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_025' + export waveuoutpGRD=${waveGRD} + ;; + "glo_200") + #Global regular lat/lon 2deg deg grid + export waveinterpGRD='' + export wavepostGRD='glo_200' + export waveuoutpGRD=${waveGRD} + ;; + "glo_500") + #Global regular lat/lon 5deg deg grid + export waveinterpGRD='' + export wavepostGRD='glo_500' + export waveuoutpGRD=${waveGRD} + ;; + "uglo_100km") + #unstructured 100km grid + export waveinterpGRD='glo_200' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + "uglo_m1g16") + #unstructured m1v16 grid + export waveinterpGRD='glo_15mxt' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + *) + echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting." 
+ exit 1 + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [[ "${CDUMP}" = "gdas" ]]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=${FHMAX_GFS} +fi +export WAVHINDH=0 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 )) +export DTPNT_WAV=3600 +export FHINCP_WAV=$(( DTPNT_WAV / 3600 )) + +# Selected output parameters (gridded) +export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" + +# Restart file config +if [[ "${CDUMP}" = "gdas" ]]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=48 # RTOFS forecasts only out to 8 days +elif [[ ${gfs_cyc} -ne 0 ]]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [[ "${CDUMP}" != gfs ]]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) # TODO: This calculation needs to move to parsing_namelists_WW3.sh + if [[ ${rst_dt_gfs} -gt 0 ]]; then + export DT_1_RST_WAV=0 #${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + #temporarily set to zero to avoid a clash in requested restart times + #which makes the wave model crash a fix for the model 
issue will be coming + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [[ ${RUNMEM} = -1 ]]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB="${RUNMEM: -2}" +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ ${DO_ICE} == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ ${DO_OCN} == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in 
two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/test_catchem/config.waveawipsbulls b/FV3GFSwfm/test_catchem/config.waveawipsbulls new file mode 100644 index 0000000000..65a8d5076b --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.waveawipsbulls @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/test_catchem/config.waveawipsgridded b/FV3GFSwfm/test_catchem/config.waveawipsgridded new file mode 100644 index 0000000000..bd7c7c11e4 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.waveawipsgridded @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/test_catchem/config.wavegempak b/FV3GFSwfm/test_catchem/config.wavegempak new file mode 100644 index 0000000000..bcbec91f07 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.wavegempak @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/test_catchem/config.waveinit b/FV3GFSwfm/test_catchem/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. 
$EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/test_catchem/config.wavepostbndpnt b/FV3GFSwfm/test_catchem/config.wavepostbndpnt new file mode 100644 index 0000000000..412c5fb42a --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/test_catchem/config.wavepostbndpntbll b/FV3GFSwfm/test_catchem/config.wavepostbndpntbll new file mode 100644 index 0000000000..6695ab0f84 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/test_catchem/config.wavepostpnt b/FV3GFSwfm/test_catchem/config.wavepostpnt new file mode 100644 index 0000000000..e87237da82 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/test_catchem/config.wavepostsbs b/FV3GFSwfm/test_catchem/config.wavepostsbs new file mode 100644 index 0000000000..b3c5902e3c --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.wavepostsbs @@ -0,0 +1,28 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +source "${EXPDIR}/config.resources" wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -n "${waveinterpGRD}" ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/test_catchem/config.waveprep b/FV3GFSwfm/test_catchem/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/test_catchem/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. 
$EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/test_catchem/jkhINFO b/FV3GFSwfm/test_catchem/jkhINFO new file mode 100644 index 0000000000..0745897dcf --- /dev/null +++ b/FV3GFSwfm/test_catchem/jkhINFO @@ -0,0 +1 @@ + use /scratch2/BMC/gsd-fv3-dev/NCEPDEV/global/Kate.Zhang/fv3gfs/expdir/CAT_C96_CCPP/CAT_C96_CCPP.xml as template diff --git a/FV3GFSwfm/test_catchem/logs/2024022100.log b/FV3GFSwfm/test_catchem/logs/2024022100.log new file mode 100644 index 0000000000..9a0da7fae1 --- /dev/null +++ b/FV3GFSwfm/test_catchem/logs/2024022100.log @@ -0,0 +1,4 @@ +2024-03-28 16:20:04 +0000 :: hfe12 :: Submitting gfsstage_ic +2024-03-28 16:20:05 +0000 :: hfe12 :: Submission of gfsstage_ic succeeded, jobid=57779671 +2024-03-28 16:20:44 +0000 :: hfe12 :: Task gfsstage_ic, jobid=57779671, in state RUNNING (RUNNING) +2024-03-28 16:20:44 +0000 :: hfe12 :: This cycle is complete: Success diff --git a/FV3GFSwfm/test_catchem/logs/2024022200.log b/FV3GFSwfm/test_catchem/logs/2024022200.log new file mode 100644 index 0000000000..92cbdc47ca --- /dev/null +++ b/FV3GFSwfm/test_catchem/logs/2024022200.log @@ -0,0 +1,4 @@ +2024-03-28 16:20:05 +0000 :: hfe12 :: Submitting gfsstage_ic +2024-03-28 16:20:05 +0000 :: hfe12 :: Submission of gfsstage_ic succeeded, jobid=57779672 +2024-03-28 16:20:44 +0000 :: hfe12 :: Task gfsstage_ic, jobid=57779672, in state RUNNING (RUNNING) +2024-03-28 16:20:44 +0000 :: hfe12 :: This cycle is complete: Success diff --git a/FV3GFSwfm/test_catchem/logs/2024032100.log 
b/FV3GFSwfm/test_catchem/logs/2024032100.log new file mode 100644 index 0000000000..12261ff67b --- /dev/null +++ b/FV3GFSwfm/test_catchem/logs/2024032100.log @@ -0,0 +1,71 @@ +2024-03-28 16:22:38 +0000 :: hfe12 :: Submitting gfsstage_ic +2024-03-28 16:22:39 +0000 :: hfe12 :: Submitting prepchem +2024-03-28 16:22:39 +0000 :: hfe12 :: Submission status of gfsstage_ic is pending at druby://hfe12:44807 +2024-03-28 16:22:39 +0000 :: hfe12 :: Submission status of prepchem is pending at druby://hfe12:44807 +2024-03-28 16:25:04 +0000 :: hfe06 :: Submission status of previously pending gfsstage_ic is success, jobid=57779746 +2024-03-28 16:25:04 +0000 :: hfe06 :: Submission status of previously pending prepchem is success, jobid=57779747 +2024-03-28 16:25:04 +0000 :: hfe06 :: Task gfsstage_ic, jobid=57779746, in state SUCCEEDED (COMPLETED), ran for 36.0 seconds, exit status=0, try=1 (of 2) +2024-03-28 16:25:04 +0000 :: hfe06 :: Task prepchem, jobid=57779747, in state FAILED (FAILED), ran for 23.0 seconds, exit status=1, try=1 (of 2) +2024-03-28 16:25:04 +0000 :: hfe06 :: Submitting prepchem +2024-03-28 16:25:04 +0000 :: hfe06 :: Submission status of prepchem is pending at druby://hfe06:40093 +2024-03-28 16:30:04 +0000 :: hfe07 :: Submission status of previously pending prepchem is success, jobid=57779777 +2024-03-28 16:30:04 +0000 :: hfe07 :: Task prepchem, jobid=57779777, in state DEAD (FAILED), ran for 22.0 seconds, exit status=1, try=2 (of 2) +2024-03-28 17:23:20 +0000 :: hfe12 :: Forcibly submitting prepchem +2024-03-28 17:23:20 +0000 :: hfe12 :: Submission status of prepchem is pending at druby://hfe12:41075 +2024-03-28 17:25:03 +0000 :: hfe11 :: Submission status of previously pending prepchem is success, jobid=57781132 +2024-03-28 17:25:03 +0000 :: hfe11 :: Task prepchem, jobid=57781132, in state DEAD (FAILED), ran for 26.0 seconds, exit status=1, try=4 (of 2) +2024-03-28 17:32:12 +0000 :: hfe12 :: Forcibly submitting prepchem +2024-03-28 17:32:12 +0000 :: hfe12 :: 
Submission status of prepchem is pending at druby://hfe12:40901 +2024-03-28 17:35:04 +0000 :: hfe12 :: Submission status of previously pending prepchem is success, jobid=57781285 +2024-03-28 17:35:04 +0000 :: hfe12 :: Task prepchem, jobid=57781285, in state QUEUED (PENDING) +2024-03-28 17:40:04 +0000 :: hfe12 :: Task prepchem, jobid=57781285, in state QUEUED (PENDING) +2024-03-28 17:45:05 +0000 :: hfe12 :: Task prepchem, jobid=57781285, in state QUEUED (PENDING) +2024-03-28 17:50:04 +0000 :: hfe11 :: Task prepchem, jobid=57781285, in state QUEUED (PENDING) +2024-03-28 17:55:05 +0000 :: hfe10 :: Task prepchem, jobid=57781285, in state QUEUED (PENDING) +2024-03-28 18:00:05 +0000 :: hfe11 :: Task prepchem, jobid=57781285, in state DEAD (FAILED), ran for 24.0 seconds, exit status=1, try=6 (of 2) +2024-03-28 19:23:05 +0000 :: hfe12 :: Forcibly submitting prepchem +2024-03-28 19:23:05 +0000 :: hfe12 :: Submission status of prepchem is pending at druby://hfe12:45019 +2024-03-28 19:25:04 +0000 :: hfe12 :: Submission status of previously pending prepchem is success, jobid=57785443 +2024-03-28 19:25:04 +0000 :: hfe12 :: Task prepchem, jobid=57785443, in state QUEUED (PENDING) +2024-03-28 19:30:05 +0000 :: hfe11 :: Task prepchem, jobid=57785443, in state QUEUED (PENDING) +2024-03-28 19:30:52 +0000 :: hfe12 :: Task prepchem, jobid=57785443, in state DEAD (CANCELLED), ran for 0.0 seconds, exit status=255, try=8 (of 2) +2024-03-28 19:30:52 +0000 :: hfe12 :: Forcibly submitting prepchem +2024-03-28 19:30:52 +0000 :: hfe12 :: Submission status of prepchem is pending at druby://hfe12:44499 +2024-03-28 19:35:16 +0000 :: hfe12 :: Submission status of previously pending prepchem is success, jobid=57785595 +2024-03-28 19:35:16 +0000 :: hfe12 :: Task prepchem, jobid=57785595, in state DEAD (FAILED), ran for 25.0 seconds, exit status=1, try=10 (of 2) +2024-03-28 19:35:16 +0000 :: hfe12 :: Forcibly submitting prepchem +2024-03-28 19:35:16 +0000 :: hfe12 :: Submission status of prepchem is 
pending at druby://hfe12:42153 +2024-03-28 19:38:08 +0000 :: hfe12 :: Submission status of previously pending prepchem is success, jobid=57785663 +2024-03-28 19:38:08 +0000 :: hfe12 :: Task prepchem, jobid=57785663, in state SUCCEEDED (COMPLETED), ran for 33.0 seconds, exit status=0, try=12 (of 2) +2024-03-28 19:43:10 +0000 :: hfe12 :: Submitting gfsfcst +2024-03-28 19:43:10 +0000 :: hfe12 :: Submission status of gfsfcst is pending at druby://hfe12:35867 +2024-03-28 19:43:39 +0000 :: hfe12 :: Submission status of previously pending gfsfcst is success, jobid=57786101 +2024-03-28 19:43:40 +0000 :: hfe12 :: Task gfsfcst, jobid=57786101, in state QUEUED (PENDING) +2024-03-28 19:45:04 +0000 :: hfe11 :: Task gfsfcst, jobid=57786101, in state QUEUED (PENDING) +2024-03-28 19:50:04 +0000 :: hfe11 :: Task gfsfcst, jobid=57786101, in state QUEUED (PENDING) +2024-03-28 19:55:04 +0000 :: hfe12 :: Task gfsfcst, jobid=57786101, in state QUEUED (PENDING) +2024-03-28 20:00:05 +0000 :: hfe12 :: Task gfsfcst, jobid=57786101, in state QUEUED (PENDING) +2024-03-28 20:03:24 +0000 :: hfe12 :: Task gfsfcst, jobid=57786101, in state FAILED (CANCELLED), ran for 0.0 seconds, exit status=255, try=1 (of 2) +2024-03-28 20:03:24 +0000 :: hfe12 :: Submitting gfsfcst +2024-03-28 20:03:24 +0000 :: hfe12 :: Submission status of gfsfcst is pending at druby://hfe12:46405 +2024-03-28 20:04:32 +0000 :: hfe12 :: Submission status of previously pending gfsfcst is failure! sbatch: lua: This job was submitted from a host running Rocky 8. Assigning job to el8 reservation. 
+sbatch: error: QOSMaxWallDurationPerJobLimit +sbatch: error: Batch job submission failed: Job violates accounting/QOS policy (job submit limit, user's size and/or time limits) +2024-03-28 20:04:32 +0000 :: hfe12 :: Submitting gfsfcst +2024-03-28 20:04:32 +0000 :: hfe12 :: Submission status of gfsfcst is pending at druby://hfe12:46861 +2024-03-28 20:05:03 +0000 :: hfe12 :: Submission status of previously pending gfsfcst is success, jobid=57787004 +2024-03-28 20:05:04 +0000 :: hfe12 :: Task gfsfcst, jobid=57787004, in state QUEUED (PENDING) +2024-03-28 20:10:06 +0000 :: hfe08 :: Task gfsfcst, jobid=57787004, in state FAILED (FAILED), ran for 18.0 seconds, exit status=1, try=1 (of 2) +2024-03-28 20:10:06 +0000 :: hfe08 :: Submitting gfsfcst +2024-03-28 20:10:07 +0000 :: hfe08 :: Submission status of gfsfcst is pending at druby://hfe08:38153 +2024-03-28 20:15:08 +0000 :: hfe05 :: Submission status of previously pending gfsfcst is success, jobid=57787128 +2024-03-28 20:15:08 +0000 :: hfe05 :: Task gfsfcst, jobid=57787128, in state DEAD (FAILED), ran for 16.0 seconds, exit status=1, try=2 (of 2) +2024-03-28 20:57:17 +0000 :: hfe10 :: Forcibly submitting gfsfcst +2024-03-28 20:57:17 +0000 :: hfe10 :: Submission status of gfsfcst is pending at druby://hfe10:34337 +2024-03-28 21:00:10 +0000 :: hfe07 :: Submission status of previously pending gfsfcst is success, jobid=57788745 +2024-03-28 21:00:10 +0000 :: hfe07 :: Task gfsfcst, jobid=57788745, in state RUNNING (COMPLETING) +2024-03-28 21:05:11 +0000 :: hfe07 :: Task gfsfcst, jobid=57788745, in state DEAD (CANCELLED), ran for 7.0 seconds, exit status=15, try=4 (of 2) +2024-03-28 23:11:34 +0000 :: hfe10 :: Forcibly submitting gfsfcst +2024-03-28 23:11:34 +0000 :: hfe10 :: Submission status of gfsfcst is pending at druby://hfe10:41195 +2024-03-28 23:15:04 +0000 :: hfe12 :: Submission status of previously pending gfsfcst is success, jobid=57792615 +2024-03-28 23:15:04 +0000 :: hfe12 :: Task gfsfcst, jobid=57792615, in state 
DEAD (CANCELLED), ran for 11.0 seconds, exit status=15, try=6 (of 2) diff --git a/FV3GFSwfm/test_catchem/logs/2024032800.log b/FV3GFSwfm/test_catchem/logs/2024032800.log new file mode 100644 index 0000000000..51a4a9d688 --- /dev/null +++ b/FV3GFSwfm/test_catchem/logs/2024032800.log @@ -0,0 +1,16 @@ +2024-03-28 12:47:02 +0000 :: hfe06 :: Submitting gfsstage_ic +2024-03-28 12:47:02 +0000 :: hfe06 :: Submission status of gfsstage_ic is pending at druby://hfe06:34815 +2024-03-28 12:49:43 +0000 :: hfe06 :: Submission status of previously pending gfsstage_ic is success, jobid=57772948 +2024-03-28 12:49:44 +0000 :: hfe06 :: Task gfsstage_ic, jobid=57772948, in state FAILED (FAILED), ran for 19.0 seconds, exit status=1, try=1 (of 2) +2024-03-28 12:49:44 +0000 :: hfe06 :: Submitting gfsstage_ic +2024-03-28 12:49:44 +0000 :: hfe06 :: Submission status of gfsstage_ic is pending at druby://hfe06:34787 +2024-03-28 13:17:35 +0000 :: hfe06 :: Submission status of previously pending gfsstage_ic is success, jobid=57772976 +2024-03-28 13:17:36 +0000 :: hfe06 :: Task gfsstage_ic, jobid=57772976, in state DEAD (FAILED), ran for 19.0 seconds, exit status=1, try=2 (of 2) +2024-03-28 13:21:23 +0000 :: hfe06 :: Submitting gfsstage_ic +2024-03-28 13:21:23 +0000 :: hfe06 :: Submission status of gfsstage_ic is pending at druby://hfe06:41695 +2024-03-28 13:45:04 +0000 :: hfe11 :: Submission status of previously pending gfsstage_ic is success, jobid=57773577 +2024-03-28 13:45:04 +0000 :: hfe11 :: Task gfsstage_ic, jobid=57773577, in state QUEUED (PENDING) +2024-03-28 13:50:04 +0000 :: hfe11 :: Task gfsstage_ic, jobid=57773577, in state QUEUED (PENDING) +2024-03-28 13:55:03 +0000 :: hfe10 :: Task gfsstage_ic, jobid=57773577, in state QUEUED (PENDING) +2024-03-28 14:00:05 +0000 :: hfe12 :: Task gfsstage_ic, jobid=57773577, in state SUCCEEDED (COMPLETED), ran for 41.0 seconds, exit status=0, try=1 (of 2) +2024-03-28 16:20:44 +0000 :: hfe12 :: This cycle is complete: Success diff --git 
a/FV3GFSwfm/test_catchem/noent_test.xml b/FV3GFSwfm/test_catchem/noent_test.xml new file mode 100644 index 0000000000..8956bb423c --- /dev/null +++ b/FV3GFSwfm/test_catchem/noent_test.xml @@ -0,0 +1,345 @@ + + + + + + + + + + + + + + + + + +]> + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSwfm/test_catchem/logs/@Y@m@d@H.log + + + 202402210000 203401020000 24:00:00 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//jobs/rocoto/stage_ic.sh + + test_catchem_gfsstage_ic_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsstage_ic.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/ + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSwfm/test_catchem + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/../RUNDIRS/test_catchem + + + + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/gfs_ctrl.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/gfs_data.tile1.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/gfs_data.tile2.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/gfs_data.tile3.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/gfs_data.tile4.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/gfs_data.tile5.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/gfs_data.tile6.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/sfc_data.tile1.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/sfc_data.tile2.nc + 
/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/sfc_data.tile3.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/sfc_data.tile4.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/sfc_data.tile5.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/@Y@m@d@H/gfs/C384/INPUT/sfc_data.tile6.nc + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//jobs/rocoto/aerosol_init.sh + + test_catchem_gfsaerosol_init_@H + gsd-fv3 + batch + hera + 00:30:00 + 1:ppn=40:tpp=1 + 6G + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsaerosol_init.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/ + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSwfm/test_catchem + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem + NETgfs + RUNgfs + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/../RUNDIRS/test_catchem + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/gfs_ctrl.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/gfs_data.tile1.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/gfs_data.tile2.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/gfs_data.tile3.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/gfs_data.tile4.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/gfs_data.tile5.nc + 
/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/gfs_data.tile6.nc + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile1.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile2.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile3.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile4.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile5.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile6.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile1.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile2.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile3.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile4.nc + 
/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile5.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile6.nc + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//jobs/rocoto/prepchem.sh + + test_catchem_prepchem_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=2 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/logs/@Y@m@d@H/prepchem.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/ + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSwfm/test_catchem + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem + + + + BINGB/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/scratch/tmp/GBBEPx + NCGB/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/scratch/tmp/emi_C384/GBBEPx + + PUBEMI/scratch2/BMC/public/data/grids/nesdis/GBBEPx/C384/@Y@m@d/ + + EMIINPUT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/scratch/tmp/emi_C384 + CDATE@Y@m@d@H + CDUMPgfs + PDY@Y@m@d + cyc@H + + SYEAR@Y + SMONTH@m + SDAY@d + SHOUR@H + + + + /scratch2/BMC/public/data/grids/nesdis/GBBEPx/C384/@Y@m@d/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + /scratch2/BMC/public/data/grids/nesdis/GBBEPx/C384/@Y@m@d/GBBEPxFRP-MeanFRP-C384GT6_v4r0_@Y@m@d.bin + /scratch2/BMC/public/data/grids/nesdis/GBBEPx/C384/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + /scratch2/BMC/public/data/grids/nesdis/GBBEPx/C384/GBBEPxFRP-MeanFRP-C384GT6_v4r0_@Y@m@d.bin + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/GBBEPx/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + /scratch2/BMC/gsd-fv3-dev/Judy.K.Henderson/GBBEPx/@Y@m@d/GBBEPxFRP-MeanFRP-C384GT6_v4r0_@Y@m@d.bin + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//jobs/rocoto/fcst.sh + + 
test_catchem_gfsfcst_@H + gsd-fv3 + batch + hera + 02:40:00 + + 33:ppn=20:tpp=2 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/ + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSwfm/test_catchem + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem + NETgfs + RUNgfs + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/../RUNDIRS/test_catchem + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/INPUT/sfc_data.tile6.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//jobs/rocoto/atmos_products.sh + + test_catchem_gfsatmos_prod#grp#_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=24:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsatmos_prod#grp#.log + + RUN_ENVIRemc + 
HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/ + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSwfm/test_catchem + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem + NETgfs + RUNgfs + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/../RUNDIRS/test_catchem + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.log#dep#.txt + + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//jobs/rocoto/arch.sh + + test_catchem_gfsarch_@H + gsd-fv3 + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 2048M + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/ + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSwfm/test_catchem + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem + NETgfs + RUNgfs + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/../RUNDIRS/test_catchem + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march//FV3GFSrun/test_catchem/gfs.@Y@m@d/@H/atmos/img/SW/files.zip + + + + + + + + diff --git a/FV3GFSwfm/test_catchem/rt_p8-chem.xml b/FV3GFSwfm/test_catchem/rt_p8-chem.xml new file mode 100644 index 0000000000..d4a73717f4 --- /dev/null +++ b/FV3GFSwfm/test_catchem/rt_p8-chem.xml @@ -0,0 +1,304 @@ + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202402210000 203401020000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + gsd-fv3 + batch + hera + 00:15:00 + 
1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_ctrl.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile1.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile2.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile3.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile4.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile5.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile1.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile2.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile3.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile4.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile5.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/aerosol_init.sh + + &PSLOT;_gfsaerosol_init_@H + gsd-fv3 + batch + hera + 00:30:00 + 1:ppn=40:tpp=1 + 6G + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsaerosol_init.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + CDATE@Y@m@d@H + CDUMP&CDUMP; + PDY@Y@m@d + cyc@H + + + + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_ctrl.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile1.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile2.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile3.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile4.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile5.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile6.nc + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile1.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile2.nc + 
&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile3.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile4.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile5.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile6.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile1.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile2.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile3.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile4.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile5.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile6.nc + + + + + + + + &JOBS_DIR;/prepchem.sh + + &PSLOT;_prepchem_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=2 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/prepchem.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + + + + BINGB&BINGB; + NCGB&NCGB; + + PUBEMI&PUBEMI;/@Y@m@d/ + + EMIINPUT&EMIINPUT; + CDATE@Y@m@d@H + CDUMP&CDUMP; + PDY@Y@m@d + cyc@H + + SYEAR@Y + SMONTH@m + SDAY@d + SHOUR@H + + + + &PUBEMI;/@Y@m@d/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + &PUBEMI;/@Y@m@d/GBBEPxFRP-MeanFRP-&CASE;GT6_v4r0_@Y@m@d.bin + + &ARCEMI;/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + &ARCEMI;/@Y@m@d/GBBEPxFRP-MeanFRP-&CASE;GT6_v4r0_@Y@m@d.bin + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:40:00 + + 33:ppn=20:tpp=2 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + + + + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/sfc_data.tile6.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + + + + + + _f000-f000 _f006-f006 
_f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.log#dep#.txt + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + gsd-fv3 + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 2048M + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + + + + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/img/SW/files.zip + + + + + + diff --git a/FV3GFSwfm/test_catchem/runcmds b/FV3GFSwfm/test_catchem/runcmds new file mode 100644 index 0000000000..45283166a5 --- /dev/null +++ b/FV3GFSwfm/test_catchem/runcmds @@ -0,0 +1,4 @@ + + +rocotorun -w test_catchem.xml -d test_catchem.db +rocotostat -w test_catchem.xml -d test_catchem.db diff --git a/FV3GFSwfm/test_catchem/test_catchem.crontab b/FV3GFSwfm/test_catchem/test_catchem.crontab new file mode 100644 index 0000000000..fded4cba02 --- /dev/null +++ b/FV3GFSwfm/test_catchem/test_catchem.crontab @@ -0,0 +1,5 @@ + +#################### test_catchem #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.6/bin/rocotorun -d 
/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem/test_catchem.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem/test_catchem.xml +################################################################# diff --git a/FV3GFSwfm/test_catchem/test_catchem.db b/FV3GFSwfm/test_catchem/test_catchem.db new file mode 100644 index 0000000000000000000000000000000000000000..1f44507acbe653e74257fb8d6d95f57e87635634 GIT binary patch literal 28672 zcmeI)PiWg#90%|xTejs^vO2OfgbpIZ~QjA`C;WN)jRL+k}t^UHc11ci8>`eP@RDN9@u%-u?Fn zfu0TXUlQ?F>=)rz;TC_N9$drv>m>YdJh zcN-~G)qG8rYWdZ&Ds{`Gg+0x7wt5Fedb(CxQ{%^Ph9XmQbIkD_ua-vZPP45!yY^rw z?5k@~EG?J~sZ^<{*VL+Xy;@q!S8qxmsy7po8!mm6uNFSYR~O`DDicpgZdgO3k4pV^ z6Vh(Wb_SOz?nUj>8si$<&FT#=tL@Ki-8#@Md$1GoWi==somIy&2d#NY#gD%TMy4hw znXfx84(x0~9&?k}w$)%S)wUvF8u-RDb4w&U9P z>S=0@d7y9k&U~U|K21paq3L+#ouL{W%N(w-*=gQxQuCoJ8Xb#P*=kC3YLw&0vw?^^ zckWJlo%iyyyW5}-pj$SY;MtyOb(%NrO=D=N^~1Jl`AQnC<}gRY(#)pW+!@5gkFPM1 zso7cP?(}JX4c+isqwj&UInvV6S?*Y^PJ6_#xPg88R910kt7$n$((3wpSeF@?pgcW}z;?Lqg;v>3)1p*L&00bZa0SG_<0uX=z1Rwx` zmn9&w5yG+@$LUWGzcNDKS0)&*faMJK9`V-D$zz&b)VpdT*gLB~`j%dPa*wW$Cj$M# zhwqVop}_x6L|*5`-^8@|UF@k~$L`0ziEYGYgujHJ_!C|iIR4?w>NeVi00bZa0SG_< z0uX=z1YV?o664A23>%oA=NOL5=5os^Mb6S+EgE&(aUVuP3mcoOYo%JPRJnG#7ZQR* z7{tj!x01;@hT)j!R#H^+#X`PPP|IbtD26Y}{JBGftK>qrNm)F1u-xnCIZ_l2A{um) zb{ieUEiL6Tejk6=*hn@+vzuj^xj9PeYAj_HCEd34c70dhcTEEun}vd^7S&>5eeHT# zt*J$RUd&z`Gw#BeanUj3xH01*W5$KYi&Iz{cZH48afo{3kXz1W6gsu(Hx_pEEwk

}vwdA{t+*$H| zzC>q#;F27CP7A5w7U*NWzXgSPwlis(*-(i48Nu1kcz^#-zw}SUzr{aa^ z00Izz00bZa0SG_<0uX?}_yU&$L6UA*o$aHQ#l>Aims2a5WJb + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202403210000 202403210000 24:00:00 + + + + &JOBS_DIR;/stage_ic.sh + + &PSLOT;_gfsstage_ic_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsstage_ic.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_ctrl.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile1.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile2.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile3.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile4.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile5.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile1.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile2.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile3.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile4.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile5.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/aerosol_init.sh + + &PSLOT;_gfsaerosol_init_@H + gsd-fv3 + batch + hera + 00:30:00 + 1:ppn=40:tpp=1 + 6G + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsaerosol_init.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + RUNgfs + CDATE@Y@m@d@H + CDUMP&CDUMP; + PDY@Y@m@d + cyc@H + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_ctrl.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile1.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile2.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile3.nc + 
&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile4.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile5.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/gfs_data.tile6.nc + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile1.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile2.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile3.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile4.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile5.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_core.res.tile6.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile1.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile2.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile3.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile4.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile5.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/@Y@m@d.@H0000.fv_tracer.res.tile6.nc + + + + + + + + &JOBS_DIR;/prepchem.sh + + &PSLOT;_prepchem_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=2 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/prepchem.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + + + + BINGB&BINGB; + NCGB&NCGB; + + + PUBEMI&PUBEMI; + EMIINPUT&EMIINPUT; + CDATE@Y@m@d@H + CDUMP&CDUMP; + PDY@Y@m@d + cyc@H + + SYEAR@Y + SMONTH@m + SDAY@d + SHOUR@H + + + + &PUBEMI;/@Y@m@d/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + &PUBEMI;/@Y@m@d/GBBEPxFRP-MeanFRP-&CASE;GT6_v4r0_@Y@m@d.bin + &PUBEMI;/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + &PUBEMI;/GBBEPxFRP-MeanFRP-&CASE;GT6_v4r0_@Y@m@d.bin + &ARCEMI;/GBBEPx.FRP.003.@Y@m@d.FV3.C384Grid.tile6.bin + 
&ARCEMI;/@Y@m@d/GBBEPxFRP-MeanFRP-&CASE;GT6_v4r0_@Y@m@d.bin + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + debug + hera + 00:30:00 + + + 33:ppn=20:tpp=2 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + RUNgfs + CDUMPgfs + CDATE@Y@m@d@H + cyc@H + + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/&CDUMP;.@Y@m@d/@H/model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/atmos_products.sh + + &PSLOT;_gfsatmos_prod#grp#_@H + gsd-fv3 + batch + hera + 00:15:00 + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsatmos_prod#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + RUNgfs + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + FHRGRP#grp# + FHRLST#lst# + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + &ROTDIR;/gfs.@Y@m@d/@H/atmos/gfs.t@Hz.log#dep#.txt + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + gsd-fv3 + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 2048M + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + 
RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + RUNgfs + CDUMPgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/img/SW/files.zip + + + + + + + + diff --git a/FV3GFSwfm/test_catchem/test_catchem.xml_gen b/FV3GFSwfm/test_catchem/test_catchem.xml_gen new file mode 100644 index 0000000000..049b3d2912 --- /dev/null +++ b/FV3GFSwfm/test_catchem/test_catchem.xml_gen @@ -0,0 +1,326 @@ + + + + + + +]> + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem/logs/@Y@m@d@H.log + + + 202403200000 202403200000 24:00:00 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/stage_ic.sh + + test_catchem_gfsstage_ic_@H + fv3-cpu + batch + hera + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsstage_ic.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + + + + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_ctrl.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile1.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile2.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile3.nc + 
/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile4.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile5.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/gfs_data.tile6.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile1.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile2.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile3.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile4.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile5.nc + /scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs/GEFS-NoahMP-aerosols-p8c_refactored/@Y@m@d@H/atmos/sfc_data.tile6.nc + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/fcst.sh + + test_catchem_gfsfcst_@H + fv3-cpu + batch + hera + 06:00:00 + 32:ppn=40:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + + + + + + + + + + + + _f000-f006 _f009-f015 _f018-f024 _f027-f033 _f036-f042 _f045-f051 _f054-f060 _f063-f069 
_f072-f078 _f081-f087 _f090-f096 _f099-f105 _f108-f114 _f117-f120 + f006 f015 f024 f033 f042 f051 f060 f069 f078 f087 f096 f105 f114 f120 + f000_f003_f006 f009_f012_f015 f018_f021_f024 f027_f030_f033 f036_f039_f042 f045_f048_f051 f054_f057_f060 f063_f066_f069 f072_f075_f078 f081_f084_f087 f090_f093_f096 f099_f102_f105 f108_f111_f114 f117_f120 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/atmos_products.sh + + test_catchem_gfsatmos_prod#grp#_@H + fv3-cpu + batch + hera + 00:15:00 + 1:ppn=24:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsatmos_prod#grp#.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + FHRLST#lst# + COMPONENTatmos + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#dep# + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/tracker.sh + + test_catchem_gfstracker_@H + fv3-cpu + batch + hera + 00:10:00 + 1:ppn=1:tpp=1 + 4G + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfstracker.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + + + + + + + + + + 
/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/genesis.sh + + test_catchem_gfsgenesis_@H + fv3-cpu + batch + hera + 00:25:00 + 1:ppn=1:tpp=1 + 10G + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsgenesis.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + + + + + + + + + + g2g1 g2o1 pcp1 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/metp.sh + + test_catchem_gfsmetp#metpcase#_@H + fv3-cpu + batch + hera + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + SDATE_GFS2024032000 + METPCASE#metpcase# + + + + + + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/arch.sh + + test_catchem_gfsarch_@H + fv3-cpu + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + 
EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + + + + + + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/jobs/rocoto/cleanup.sh + + test_catchem_gfscleanup_@H + fv3-cpu + batch + hera + 00:15:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun/test_catchem/logs/@Y@m@d@H/gfscleanup.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSwfm/test_catchem + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/catchem_march/FV3GFSrun + DATAROOT/scratch1/NCEPDEV/stmp2/Judy.K.Henderson/RUNDIRS/test_catchem + + + + + + + + + + diff --git a/FV3GFSwfm/test_catchem/test_catchem_lock.db b/FV3GFSwfm/test_catchem/test_catchem_lock.db new file mode 100644 index 0000000000000000000000000000000000000000..3b6e37277874097ffe9bee6aad473dfe3890f193 GIT binary patch literal 8192 zcmeI#yAA prep_chem_sources.inp . $MODULESHOME/init/sh 2>/dev/null - module list - module purge - module list - module load intel/14.0.2 - module load szip/2.1 - module load hdf5/1.8.14 - module load netcdf/4.3.0 - module list ./prep_chem_sources || fail "ERROR: prep_chem_sources failed." status=$? if [ $status -ne 0 ]; then @@ -81,15 +82,7 @@ fi for n in $(seq 1 6); do tiledir=tile${n} - #mkdir -p $tiledir - #cd $tiledir - EMIINPUT=/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop/emi_${CASE} -# if [ ${EMIYEAR} -gt 2018 ]; then eval $NLN $EMIINPUT/EMI_$EMIYEAR/$SMONTH/emi_data.tile${n}.nc . -# else -# eval $NLN $EMIINPUT/EMI/$SMONTH/emi_data.tile${n}.nc . 
-# fi - eval $NLN $EMIINPUT/EMI2/$SMONTH/emi2_data.tile${n}.nc . #eval $NLN $EMIINPUT/fengsha_2023/$SMONTH/dust_data.tile${n}.nc . eval $NLN $EMIINPUT/fengsha_2023/12month/dust_data_g12m.tile${n}.nc . @@ -106,44 +99,43 @@ for n in $(seq 1 6); do eval $NLN ${CASE}-T-${emiss_date}0000-SO2-bb.bin ebu_so2.dat fi if [ $EMITYPE -eq 2 ]; then - #if [ ${res} -eq 384 ]; then - #DIRGB=/scratch1/BMC/gsd-fv3-dev/lzhang/GBBEPx - DIRGB=/scratch2/NCEPDEV/naqfc/Kate.Zhang/GBBPEx_v004/$SYEAR - #else - # DIRGB=/scratch1/BMC/gsd-fv3-dev/lzhang/GBBEPx/${CASE} - #fi - NCGB=/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop/emi_${CASE}/GBBEPx - PUBEMI=/scratch2/BMC/public/data/grids/sdsu/emissions - #PUBEMI=/scratch2/NCEPDEV/stmp1/Li.Pan/tmp - emiss_date1="$SYEAR$SMONTH$SDAY" # default value for branch testing echo "emiss_date: $emiss_date1" - #mkdir -p $DIRGB/$emiss_date1 - #$NCP $PUBEMI/*${emiss_date1}.*.bin $DIRGB/$emiss_date1/ + ## JKH - uncomment next 2 lines if not running FV3-CHEM prepchem task + mkdir -p $BINGB/$emiss_date1 + $NCP ${PUBEMI}/*${emiss_date1}.bin ${BINGB}/$emiss_date1/ - - if [[ -f $NCGB/${emiss_date1}/FIRE_GBBEPx_data.tile${n}.nc ]]; then - echo "NetCDF GBBEPx File $NCGB/${emiss_date1}/FIRE_GBBEPx_data.tile${n}.nc exists, just link." + if [[ -f ${NCGB}/${emiss_date1}/FIRE_GBBEPx_data.tile${n}.nc ]]; then + echo "NetCDF GBBEPx File ${BINGB}/${emiss_date1}/FIRE_GBBEPx_data.tile${n}.nc exists, just link." 
else - - #if [ ${SYEAR} -eq 2016 ]; then - BC=GBBEPxemis-BC-${CASE}GT${n}_v4r0_${emiss_date1}.bin - OC=GBBEPxemis-OC-${CASE}GT${n}_v4r0_${emiss_date1}.bin - PM25=GBBEPxemis-PM25-${CASE}GT${n}_v4r0_${emiss_date1}.bin - SO2=GBBEPxemis-SO2-${CASE}GT${n}_v4r0_${emiss_date1}.bin - FRP=GBBEPxFRP-MeanFRP-${CASE}GT${n}_v4r0_${emiss_date1}.bin - #else - # BC=GBBEPx.bc.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin - # OC=GBBEPx.oc.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin - # PM25=GBBEPx.pm25.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin - # SO2=GBBEPx.so2.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin - # FRP=meanFRP.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin - #fi + + if [ ${SYEAR} -eq 2016 -o ${emiss_date1} -ge 20230115 ]; then ## JKH - change date + if [ ${emiss_date1} -ge 20230509 ]; then ## JKH - change date + BC=GBBEPxemis-BC-${CASE}GT${n}_v4r0_${emiss_date1}.bin + OC=GBBEPxemis-OC-${CASE}GT${n}_v4r0_${emiss_date1}.bin + SO2=GBBEPxemis-SO2-${CASE}GT${n}_v4r0_${emiss_date1}.bin + FRP=GBBEPxFRP-MeanFRP-${CASE}GT${n}_v4r0_${emiss_date1}.bin + PM25=GBBEPxemis-PM25-${CASE}GT${n}_v4r0_${emiss_date1}.bin + else + BC=GBBEPx.emis_BC.003.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + OC=GBBEPx.emis_OC.003.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + SO2=GBBEPx.emis_SO2.003.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + FRP=GBBEPx.FRP.003.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + if [ ${SYEAR} -eq 2016 ]; then + PM25=GBBEPx.emis_PM25.003.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + else + PM25=GBBEPx.emis_PM2.5.003.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + fi + fi + else + BC=GBBEPx.bc.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + OC=GBBEPx.oc.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + PM25=GBBEPx.pm25.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + SO2=GBBEPx.so2.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + FRP=meanFRP.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + fi mkdir -p $NCGB/${emiss_date1} - set -ue - module load intel/19.0.5.281 netcdf szip hdf5 - set -x $NLN 
$EXECgfs/mkncgbbepx . ./mkncgbbepx < prep_chem_sources.inp + . $MODULESHOME/init/sh 2>/dev/null + module list + module purge + module list + module load intel/14.0.2 + module load szip/2.1 + module load hdf5/1.8.14 + module load netcdf/4.3.0 + module list + ./prep_chem_sources || fail "ERROR: prep_chem_sources failed." + status=$? + if [ $status -ne 0 ]; then + echo "error prep_chem_sources failed $status " + exit $status + fi +fi + +for n in $(seq 1 6); do + tiledir=tile${n} + #mkdir -p $tiledir + #cd $tiledir + EMIINPUT=/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop/emi_${CASE} +# if [ ${EMIYEAR} -gt 2018 ]; then + eval $NLN $EMIINPUT/EMI_$EMIYEAR/$SMONTH/emi_data.tile${n}.nc . +# else +# eval $NLN $EMIINPUT/EMI/$SMONTH/emi_data.tile${n}.nc . +# fi + + eval $NLN $EMIINPUT/EMI2/$SMONTH/emi2_data.tile${n}.nc . + #eval $NLN $EMIINPUT/fengsha_2023/$SMONTH/dust_data.tile${n}.nc . + eval $NLN $EMIINPUT/fengsha_2023/12month/dust_data_g12m.tile${n}.nc . + + if [ $EMITYPE -eq 1 ]; then + mkdir -p $tiledir + cd $tiledir + eval $NLN ${CASE}-T-${emiss_date}0000-BBURN3-bb.bin ebu_pm_10.dat + eval $NLN ${CASE}-T-${emiss_date}0000-SO4-bb.bin ebu_sulf.dat + eval $NLN ${CASE}-T-${emiss_date}0000-plume.bin plumestuff.dat + eval $NLN ${CASE}-T-${emiss_date}0000-OC-bb.bin ebu_oc.dat + eval $NLN ${CASE}-T-${emiss_date}0000-BC-bb.bin ebu_bc.dat + eval $NLN ${CASE}-T-${emiss_date}0000-BBURN2-bb.bin ebu_pm_25.dat + eval $NLN ${CASE}-T-${emiss_date}0000-SO2-bb.bin ebu_so2.dat + fi + if [ $EMITYPE -eq 2 ]; then + #if [ ${res} -eq 384 ]; then + #DIRGB=/scratch1/BMC/gsd-fv3-dev/lzhang/GBBEPx + DIRGB=/scratch2/NCEPDEV/naqfc/Kate.Zhang/GBBPEx_v004/$SYEAR + #else + # DIRGB=/scratch1/BMC/gsd-fv3-dev/lzhang/GBBEPx/${CASE} + #fi + NCGB=/scratch1/BMC/gsd-fv3-dev/Haiqin.Li/Develop/emi_${CASE}/GBBEPx + PUBEMI=/scratch2/BMC/public/data/grids/sdsu/emissions + #PUBEMI=/scratch2/NCEPDEV/stmp1/Li.Pan/tmp + + emiss_date1="$SYEAR$SMONTH$SDAY" # default value for branch testing + echo "emiss_date: 
$emiss_date1" + #mkdir -p $DIRGB/$emiss_date1 + #$NCP $PUBEMI/*${emiss_date1}.*.bin $DIRGB/$emiss_date1/ + + + if [[ -f $NCGB/${emiss_date1}/FIRE_GBBEPx_data.tile${n}.nc ]]; then + echo "NetCDF GBBEPx File $NCGB/${emiss_date1}/FIRE_GBBEPx_data.tile${n}.nc exists, just link." + else + + #if [ ${SYEAR} -eq 2016 ]; then + BC=GBBEPxemis-BC-${CASE}GT${n}_v4r0_${emiss_date1}.bin + OC=GBBEPxemis-OC-${CASE}GT${n}_v4r0_${emiss_date1}.bin + PM25=GBBEPxemis-PM25-${CASE}GT${n}_v4r0_${emiss_date1}.bin + SO2=GBBEPxemis-SO2-${CASE}GT${n}_v4r0_${emiss_date1}.bin + FRP=GBBEPxFRP-MeanFRP-${CASE}GT${n}_v4r0_${emiss_date1}.bin + #else + # BC=GBBEPx.bc.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + # OC=GBBEPx.oc.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + # PM25=GBBEPx.pm25.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + # SO2=GBBEPx.so2.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + # FRP=meanFRP.${emiss_date1}.FV3.${CASE}Grid.tile${n}.bin + #fi + + mkdir -p $NCGB/${emiss_date1} + set -ue + module load intel/19.0.5.281 netcdf szip hdf5 + set -x + $NLN $EXECgfs/mkncgbbepx . + ./mkncgbbepx <