diff --git a/.gitignore b/.gitignore index bd53d9c20..9b863f43b 100644 --- a/.gitignore +++ b/.gitignore @@ -146,33 +146,40 @@ build*/ install*/ # Ignore the following bundle repositories -ecbuild/ -atlas/ -crtm/ -eckit/ -fckit/ -femps/ -fms/ -fv3/ -fv3-jedi/ -fv3-jedi-data/ -fv3-jedi-lm/ -gsibec/ -gsw/ -icepack/ -/ioda/ -ioda-data/ -iodaconv/ -jedicmake/ -land-imsproc/ -land-jediincr/ -mom6/ -oops/ -saber/ -saber-data/ -/soca/ -test-data-release/ -ufo/ -ufo-data/ -vader/ +/ecbuild +/atlas +/crtm +/eckit +/fckit +/femps +/fms +/fv3 +/fv3-jedi +/fv3-jedi-data +/fv3-jedi-lm +/gsibec +/gsw +/icepack +/ioda +/ioda-data +/iodaconv +/jedicmake +/land-imsproc +/land-jediincr +/mom6 +/oops +/saber +/saber-data +/soca +/test-data-release +/ufo +/ufo-data +/vader /gdas-utils +/sorc/fv3-jedi-data +/sorc/ufo-data +/sorc/ioda-data +/sorc/test-data-release +/sorc/gdas +/sorc/gdas-utils + diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..ce3d33e2c --- /dev/null +++ b/.gitmodules @@ -0,0 +1,78 @@ +[submodule "sorc/jedicmake"] + path = sorc/jedicmake + url = https://github.com/jcsda/jedi-cmake.git +[submodule "sorc/gsibec"] + path = sorc/gsibec + url = https://github.com/GEOS-ESM/GSIbec.git + branch = develop +[submodule "sorc/gsw"] + path = sorc/gsw + url = https://github.com/jcsda-internal/GSW-Fortran.git + branch = develop +[submodule "sorc/oops"] + path = sorc/oops + url = https://github.com/jcsda/oops.git + branch = develop +[submodule "sorc/vader"] + path = sorc/vader + url = https://github.com/jcsda/vader.git + branch = develop +[submodule "sorc/saber"] + path = sorc/saber + url = https://github.com/jcsda/saber.git + branch = develop +[submodule "sorc/ioda"] + path = sorc/ioda + url = https://github.com/jcsda/ioda.git + branch = develop +[submodule "sorc/ufo"] + path = sorc/ufo + url = https://github.com/jcsda/ufo.git + branch = develop +[submodule "sorc/fv3"] + path = sorc/fv3 + url = https://github.com/jcsda/GFDL_atmos_cubed_sphere.git + branch = 
release-stable +[submodule "sorc/femps"] + path = sorc/femps + url = https://github.com/jcsda/femps.git + branch = develop +[submodule "sorc/fv3-jedi-lm"] + path = sorc/fv3-jedi-lm + url = https://github.com/jcsda/fv3-jedi-linearmodel.git + branch = develop +[submodule "sorc/fv3-jedi"] + path = sorc/fv3-jedi + url = https://github.com/jcsda/fv3-jedi.git + branch = develop +[submodule "sorc/icepack"] + path = sorc/icepack + url = https://github.com/JCSDA-internal/Icepack.git + branch = feature/ecbuild-new +[submodule "sorc/mom6"] + path = sorc/mom6 + url = https://github.com/jcsda-internal/MOM6.git + branch = main-ecbuild +[submodule "sorc/soca"] + path = sorc/soca + url = https://github.com/jcsda-internal/soca.git + branch = develop +[submodule "sorc/iodaconv"] + path = sorc/iodaconv + url = https://github.com/JCSDA-internal/ioda-converters.git + branch = develop +[submodule "sorc/land-imsproc"] + path = sorc/land-imsproc + url = https://github.com/NOAA-PSL/land-IMS_proc.git + branch = develop +[submodule "sorc/land-jediincr"] + path = sorc/land-jediincr + url = https://github.com/NOAA-PSL/land-apply_jedi_incr.git + branch = develop +[submodule "sorc/crtm"] + path = sorc/crtm + url = https://github.com/jcsda/crtm.git +[submodule "sorc/fms"] + path = sorc/fms + url = https://github.com/jcsda/fms.git + branch = release-stable diff --git a/CMakeLists.txt b/CMakeLists.txt index 2091893fd..f4f2fd98f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,5 +1,5 @@ # ------------------------------------------------------------------------- # -# Application for all the components needed for the GDAS system # +# Application for the GDAS system # # ------------------------------------------------------------------------- # # Check for minimim cmake requirement @@ -36,135 +36,8 @@ option(BUILD_GDASBUNDLE "Build GDAS Bundle" ON) option(CLONE_JCSDADATA "Clone JCSDA test data repositories" OFF) option(WORKFLOW_TESTS "Include global-workflow dependent tests" OFF) -# Initialize 
bundle -# ----------------- -ecbuild_bundle_initialize() - -# Build bundle source code. -if(BUILD_GDASBUNDLE) - -# jedi-cmake - ecbuild_bundle( PROJECT jedicmake GIT "https://github.com/jcsda/jedi-cmake.git" TAG 1.4.0 RECURSIVE ) - include( jedicmake/cmake/Functions/git_functions.cmake ) - -# ECMWF libraries - option("BUNDLE_SKIP_ECKIT" "Don't build eckit" "ON" ) # Skip eckit build unless user passes -DBUNDLE_SKIP_ECKIT=OFF - option("BUNDLE_SKIP_FCKIT" "Don't build fckit" "ON") # Skip fckit build unless user passes -DBUNDLE_SKIP_FCKIT=OFF - option("BUNDLE_SKIP_ATLAS" "Don't build atlas" "ON") # Skip atlas build unless user passes -DBUNDLE_SKIP_ATLAS=OFF - -# turn off optional OOPS toy models - option( ENABLE_LORENZ95_MODEL "Build LORENZ95 toy model" OFF ) - option( ENABLE_QG_MODEL "Build QG toy model" OFF ) - - ecbuild_bundle( PROJECT eckit GIT "https://github.com/ecmwf/eckit.git" TAG 1.16.0 ) - ecbuild_bundle( PROJECT fckit GIT "https://github.com/ecmwf/fckit.git" TAG 0.9.2 ) - ecbuild_bundle( PROJECT atlas GIT "https://github.com/ecmwf/atlas.git" TAG 0.35.0 ) - -# External (required) observation operators - option("BUNDLE_SKIP_CRTM" "Don't build CRTM" "OFF") # Don't build crtm unless user passes -DBUNDLE_SKIP_CRTM=OFF - ecbuild_bundle( PROJECT crtm GIT "https://github.com/JCSDA/crtm.git" TAG v2.4.1-jedi.1 ) - -# Build GSI-B - option(BUILD_GSIBEC "Build GSI-B" OFF) - if(BUILD_GSIBEC) - ecbuild_bundle( PROJECT gsibec GIT "https://github.com/GEOS-ESM/GSIbec.git" TAG 1.1.2 ) - endif() - -# Gibbs seawater - ecbuild_bundle( PROJECT gsw GIT "https://github.com/jcsda-internal/GSW-Fortran.git" BRANCH develop ) - -# Core JEDI repositories - ecbuild_bundle( PROJECT oops GIT "https://github.com/jcsda/oops.git" BRANCH develop) - ecbuild_bundle( PROJECT vader GIT "https://github.com/jcsda/vader.git" BRANCH develop ) - ecbuild_bundle( PROJECT saber GIT "https://github.com/jcsda/saber.git" BRANCH develop ) - option(ENABLE_IODA_DATA "Obtain ioda test data from ioda-data 
repository (vs tarball)" ON) - ecbuild_bundle( PROJECT ioda GIT "https://github.com/jcsda/ioda.git" BRANCH develop ) - option(ENABLE_UFO_DATA "Obtain ufo test data from ufo-data repository (vs tarball)" ON) - ecbuild_bundle( PROJECT ufo GIT "https://github.com/jcsda/ufo.git" BRANCH develop ) - -# FMS and FV3 dynamical core - ecbuild_bundle( PROJECT fms GIT "https://github.com/jcsda/FMS.git" BRANCH release-stable ) - ecbuild_bundle( PROJECT fv3 GIT "https://github.com/jcsda/GFDL_atmos_cubed_sphere.git" BRANCH release-stable ) - -# fv3-jedi and associated repositories - ecbuild_bundle( PROJECT femps GIT "https://github.com/jcsda/femps.git" BRANCH develop ) - ecbuild_bundle( PROJECT fv3-jedi-lm GIT "https://github.com/jcsda/fv3-jedi-linearmodel.git" BRANCH develop ) - option(ENABLE_FV3_JEDI_DATA "Obtain fv3-jedi test data from fv3-jedi-data repository (vs tarball)" ON) - ecbuild_bundle( PROJECT fv3-jedi GIT "https://github.com/jcsda/fv3-jedi.git" BRANCH develop ) - -# SOCA associated repositories -# TODO: Move the Icepack fork to EMC github - set(BUILD_ICEPACK "ON" CACHE STRING "Build the icepack library") - if ( BUILD_ICEPACK ) - ecbuild_bundle( PROJECT icepack GIT "https://github.com/JCSDA-internal/Icepack.git" BRANCH feature/ecbuild-new ) - endif() - ecbuild_bundle( PROJECT mom6 GIT "https://github.com/jcsda-internal/MOM6.git" BRANCH main-ecbuild RECURSIVE ) - ecbuild_bundle( PROJECT soca GIT "https://github.com/jcsda-internal/soca.git" BRANCH develop ) - - # Build JEDI/DA or other peripherals - ecbuild_bundle( PROJECT gdas-utils SOURCE "./utils" ) - -# Build IODA converters - option(BUILD_IODA_CONVERTERS "Build IODA Converters" ON) - if(BUILD_IODA_CONVERTERS) - ecbuild_bundle( PROJECT iodaconv GIT "https://github.com/JCSDA-internal/ioda-converters.git" BRANCH develop ) - endif() - -# Land associated repositories - ecbuild_bundle( PROJECT land-imsproc GIT "https://github.com/NOAA-PSL/land-IMS_proc.git" TAG 6373819 ) - ecbuild_bundle( PROJECT land-jediincr GIT 
"https://github.com/NOAA-PSL/land-apply_jedi_incr.git" TAG 2923344) - -# ioda, ufo, fv3-jedi, and saber test data -#--------------------------------- -if(CLONE_JCSDADATA) - - # If IODA branch is being built set GIT_BRANCH_FUNC to IODA's current branch. - # If a tagged version of IODA is being built set GIT_TAG_FUNC to ioda's current tag. In this case, - # IODA test files will be download from UCAR DASH and ioda-data repo will not be cloned. - # When LOCAL_PATH_JEDI_TESTFILES is set to the directory of IODA test files stored - # in a local directory, ioda-data repo will not be cloned - - find_branch_name(REPO_DIR_NAME ioda) - # When LOCAL_PATH_JEDI_TESTFILES is set to the directory of IODA test files stored - # in a local directory, ioda-data repo will not be cloned - if( NOT DEFINED ENV{LOCAL_PATH_JEDI_TESTFILES} AND NOT DEFINED GIT_TAG_FUNC ) - ecbuild_bundle( PROJECT ioda-data GIT "https://github.com/JCSDA-internal/ioda-data.git" BRANCH develop ) - - # If IODA's current branch is available in ioda-data repo, that branch will be checked out - branch_checkout (REPO_DIR_NAME ioda-data - BRANCH ${GIT_BRANCH_FUNC} ) - endif() - - # same procedure for ufo-data - find_branch_name(REPO_DIR_NAME ufo) - if( NOT DEFINED ENV{LOCAL_PATH_JEDI_TESTFILES} AND NOT DEFINED GIT_TAG_FUNC ) - ecbuild_bundle( PROJECT ufo-data GIT "https://github.com/JCSDA-internal/ufo-data.git" BRANCH develop ) - - # If UFO's current branch is available in ioda-data repo, that branch will be checked out - branch_checkout (REPO_DIR_NAME ufo-data - BRANCH ${GIT_BRANCH_FUNC} ) - endif() - - # same procedure for fv3-jedi-data - find_branch_name(REPO_DIR_NAME fv3-jedi) - if( NOT DEFINED ENV{LOCAL_PATH_JEDI_TESTFILES} AND NOT DEFINED GIT_TAG_FUNC ) - ecbuild_bundle( PROJECT fv3-jedi-data GIT "https://github.com/JCSDA-internal/fv3-jedi-data.git" BRANCH develop ) - - # If fv3-jedi's current branch is available in ioda-data repo, that branch will be checked out - branch_checkout (REPO_DIR_NAME fv3-jedi-data - 
BRANCH ${GIT_BRANCH_FUNC} ) - endif() - - endif(CLONE_JCSDADATA) - -endif(BUILD_GDASBUNDLE) - # Install utility scripts. add_subdirectory(ush) # Include testing. add_subdirectory(test) - -# Finalize bundle -# --------------- -ecbuild_bundle_finalize() diff --git a/build.sh b/build.sh index 1d80abba8..fca9afedd 100755 --- a/build.sh +++ b/build.sh @@ -70,7 +70,7 @@ while getopts "p:t:c:hvdfa" opt; do done case ${BUILD_TARGET} in - hera | orion) + hera | orion | hercules) echo "Building GDASApp on $BUILD_TARGET" source $dir_root/ush/module-setup.sh module use $dir_root/modulefiles @@ -113,7 +113,7 @@ echo "Configuring ..." set -x cmake \ ${CMAKE_OPTS:-} \ - $dir_root + $dir_root/sorc set +x # Build diff --git a/ci/stable_driver.sh b/ci/stable_driver.sh index c75015c97..4172d6659 100755 --- a/ci/stable_driver.sh +++ b/ci/stable_driver.sh @@ -64,19 +64,10 @@ cd $stableroot/$datestr/global-workflow/sorc/gdas.cd git checkout develop git pull -# ============================================================================== -# run ecbuild to get the repos cloned -mkdir -p build - -cd build -ecbuild ../ -cd .. -rm -rf build - # ============================================================================== # update the hashes to the most recent gdasdir=$stableroot/$datestr/global-workflow/sorc/gdas.cd -$my_dir/stable_mark.sh $gdasdir +$my_dir/../ush/submodules/update_develop.sh $gdasdir # ============================================================================== # run the automated testing @@ -85,22 +76,16 @@ ci_status=$? 
total=0 if [ $ci_status -eq 0 ]; then cd $gdasdir - # copy the CMakeLists file for safe keeping - cp $gdasdir/CMakeLists.txt $gdasdir/CMakeLists.txt.new - total=$(($total+$?)) - if [ $total -ne 0 ]; then - echo "Unable to cp CMakeLists" >> $stableroot/$datestr/output - fi # checkout feature/stable-nightly git stash total=$(($total+$?)) if [ $total -ne 0 ]; then - echo "Unable to cp CMakeLists" >> $stableroot/$datestr/output + echo "Unable to git stash" >> $stableroot/$datestr/output fi git checkout feature/stable-nightly total=$(($total+$?)) if [ $total -ne 0 ]; then - echo "Unable to cp CMakeLists" >> $stableroot/$datestr/output + echo "Unable to checkout feature/stable-nightly" >> $stableroot/$datestr/output fi # merge in develop git merge develop @@ -108,17 +93,16 @@ if [ $ci_status -eq 0 ]; then if [ $total -ne 0 ]; then echo "Unable to merge develop" >> $stableroot/$datestr/output fi - # force move the copy to the original path of CMakeLists.txt - /bin/mv -f $gdasdir/CMakeLists.txt.new $gdasdir/CMakeLists.txt + # add in submodules + git stash pop total=$(($total+$?)) if [ $total -ne 0 ]; then - echo "Unable to mv CMakeLists" >> $stableroot/$datestr/output + echo "Unable to git stash pop" >> $stableroot/$datestr/output fi - # commit this change and push - git add CMakeLists.txt + $my_dir/../ush/submodules/add_submodules.sh $gdasdir total=$(($total+$?)) if [ $total -ne 0 ]; then - echo "Unable to add CMakeLists to commit" >> $stableroot/$datestr/output + echo "Unable to add updated submodules to commit" >> $stableroot/$datestr/output fi git diff-index --quiet HEAD || git commit -m "Update to new stable build on $datestr" total=$(($total+$?)) diff --git a/ci/stable_mark.sh b/ci/stable_mark.sh deleted file mode 100755 index b3a67a2d0..000000000 --- a/ci/stable_mark.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# -# Stolen from Travis via soca-science -# original script: -# 
https://github.com/JCSDA-internal/soca-science/blob/develop/.github/travisci/stable_mark.sh -# -set -e - -# figure out what git hash is associated with each repo in the bundle. -# Note that there are several places where this repo could exist. -bundle_dir=$1 -bundle_repos="oops vader saber ioda ufo iodaconv ioda-data ufo-data saber-data \ - fms fv3 femps fv3-jedi-lm fv3-jedi fv3-jedi-data gsibec \ - gsw mom6 soca" -for r in $bundle_repos; do - - echo "" - echo "Finding hash tag for $r..." - hash="none" - - # check the repo source ( for uncached repos, i.e. the main test repo) - if [[ "$hash" == "none" ]]; then - echo -n "searching src.. " - src_dir=$bundle_dir/$r - [[ -d $src_dir ]] \ - && cd $src_dir \ - && hash=$(git rev-parse HEAD || echo "none") - [[ "$hash" == "none" ]] && echo "NOT found" || echo "FOUND" - fi - - # if a git hash was found, update the bundle with a tagged version - echo "git_hash: $hash" - if [[ $hash != "none" ]]; then - hash=${hash:0:7} - echo "changing $r to $hash in $bundle_dir/CMakeLists.txt" - sed -i "s/\(.*PROJECT \+$r .*\)\(BRANCH\|TAG\) *\([a-zA-Z0-9\/\_\.\-]*\)\(.*\)/\1TAG $hash\4/g" $bundle_dir/CMakeLists.txt - fi -done diff --git a/modulefiles/GDAS/hercules.lua b/modulefiles/GDAS/hercules.lua new file mode 100644 index 000000000..e989af14a --- /dev/null +++ b/modulefiles/GDAS/hercules.lua @@ -0,0 +1,98 @@ +help([[ +Load environment for running the GDAS application with Intel compilers and MPI. 
+]]) + +local pkgName = myModuleName() +local pkgVersion = myModuleVersion() +local pkgNameVer = myModuleFullName() + +prepend_path("MODULEPATH", '/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core') +prepend_path("MODULEPATH", '/work2/noaa/da/python/opt/modulefiles/stack') + +-- below two lines get us access to the spack-stack modules +load("stack-intel/2021.9.0") +load("stack-intel-oneapi-mpi/2021.9.0") +--load("stack-python/3.10.8") +-- JCSDA has 'jedi-fv3-env/unified-dev', but we should load these manually as needed +load("cmake/3.23.1") +load("curl/8.1.2") +load("zlib/1.2.13") +load("git/2.40.0") +--load("pkg-config/0.27.1") +load("hdf5/1.14.0") +load("parallel-netcdf/1.12.2") +load("netcdf-c/4.9.2") +load("nccmp/1.9.0.1") +load("netcdf-fortran/4.6.0") +load("nco/5.0.6") +load("parallelio/2.5.10") +load("wget/1.21.1") +load("boost/1.78") +load("bufr/12.0.1") +load("git-lfs/3.1.2") +load("ecbuild/3.7.2") +load("openjpeg/2.4.0") +load("eccodes/2.27.0") +load("eigen/3.4.0") +load("openblas/0.3.19") +load("eckit/1.24.4") +load("fftw/3.3.10") +load("fckit/0.11.0") +load("fiat/1.2.0") +load("ectrans/1.2.0") +load("atlas/0.35.0") +load("sp/2.3.3") +load("gsl-lite/0.37.0") +load("libjpeg/2.1.0") +load("krb5/1.20.1") +load("libtirpc/1.2.6") +load("hdf/4.2.15") +load("jedi-cmake/1.4.0") +load("libpng/1.6.37") +load("libxt/1.1.5") +load("libxmu/1.1.4") +load("libxpm/3.5.12") +load("libxaw/1.0.13") +load("udunits/2.2.28") +load("ncview/2.1.8") +load("netcdf-cxx4/4.3.1") +load("py-pybind11/2.8.1") +--load("crtm/v2.4_jedi") +load("contrib/0.1") +load("noaatools/3.1") +load("rocoto/1.3.5") + +load("hpc/1.2.0") +unload("python/3.10.8") +unload("python/3.9.2") +load("miniconda3/4.6.14") +load("gdasapp/1.0.0") + +load("boost/1.78.0") +load("bufr/12.0.1") +load("fckit/0.11.0") +load("atlas/0.35.0") +load("py-pybind11/2.8.1") + +-- below is a hack because of cmake finding the wrong python... 
+setenv("CONDA_PREFIX", "/work2/noaa/da/python/opt/core/miniconda3/4.6.14/envs/gdasapp/") + +setenv("CC","mpiicc") +setenv("FC","mpiifort") +setenv("CXX","mpiicpc") +local mpiexec = '/opt/slurm/bin/srun' +local mpinproc = '-n' +setenv('MPIEXEC_EXEC', mpiexec) +setenv('MPIEXEC_NPROC', mpinproc) + +setenv('R2D2_CONFIG', '/work2/noaa/da/cmartin/GDASApp/R2D2_SHARED/config_orion.yaml') +setenv("CRTM_FIX","/work2/noaa/da/cmartin/GDASApp/fix/crtm/2.4.0") +setenv("GDASAPP_TESTDATA","/work2/noaa/da/cmartin/CI/GDASApp/data") +prepend_path("PATH","/apps/contrib/NCEP/libs/hpc-stack/intel-2018.4/prod_util/1.2.2/bin") + +execute{cmd="ulimit -s unlimited",modeA={"load"}} + +whatis("Name: ".. pkgName) +whatis("Version: ".. pkgVersion) +whatis("Category: GDASApp") +whatis("Description: Load all libraries needed for GDASApp") diff --git a/parm/aero/variational/3dvar_fgat_gfs_aero.yaml b/parm/aero/variational/3dvar_fgat_gfs_aero.yaml index 5887ca55a..2252493d8 100644 --- a/parm/aero/variational/3dvar_fgat_gfs_aero.yaml +++ b/parm/aero/variational/3dvar_fgat_gfs_aero.yaml @@ -49,7 +49,7 @@ cost function: observations: !INC ${OBS_LIST} variational: minimizer: - algorithm: DRIPCG + algorithm: DRPCG iterations: - ninner: 35 gradient norm reduction: 1e-10 diff --git a/parm/aero/variational/3dvar_gfs_aero.yaml b/parm/aero/variational/3dvar_gfs_aero.yaml index 719afad52..75371080f 100644 --- a/parm/aero/variational/3dvar_gfs_aero.yaml +++ b/parm/aero/variational/3dvar_gfs_aero.yaml @@ -40,7 +40,7 @@ cost function: observations: !INC ${OBS_LIST} variational: minimizer: - algorithm: DRIPCG + algorithm: DRPCG iterations: - ninner: 35 gradient norm reduction: 1e-10 diff --git a/parm/atm/variational/3dvar_dripcg.yaml b/parm/atm/variational/3dvar_drpcg.yaml similarity index 99% rename from parm/atm/variational/3dvar_dripcg.yaml rename to parm/atm/variational/3dvar_drpcg.yaml index ed6a15b5a..9464c7489 100644 --- a/parm/atm/variational/3dvar_dripcg.yaml +++ b/parm/atm/variational/3dvar_drpcg.yaml 
@@ -33,7 +33,7 @@ cost function: observations: !INC ${OBS_LIST} variational: minimizer: - algorithm: DRIPCG + algorithm: DRPCG iterations: - ninner: 2 gradient norm reduction: 1e-10 diff --git a/parm/soca/berror/soca_bump2d.yaml b/parm/soca/berror/old/soca_bump2d.yaml similarity index 100% rename from parm/soca/berror/soca_bump2d.yaml rename to parm/soca/berror/old/soca_bump2d.yaml diff --git a/parm/soca/berror/soca_bump_split.yaml b/parm/soca/berror/old/soca_bump_split.yaml similarity index 100% rename from parm/soca/berror/soca_bump_split.yaml rename to parm/soca/berror/old/soca_bump_split.yaml diff --git a/parm/soca/berror/saber_blocks.yaml b/parm/soca/berror/saber_blocks.yaml index b8965cf05..7fa507692 100644 --- a/parm/soca/berror/saber_blocks.yaml +++ b/parm/soca/berror/saber_blocks.yaml @@ -1,25 +1,35 @@ covariance model: hybrid components: - covariance: + # This will setup B = K D C_v C_h C_h^{T} C_v^{T} D K^{T} covariance model: SABER saber central block: - saber block name: ID - # This will setup B = K D C_v C_h C_h^{T} C_v^{T} D K^{T} + saber block name: EXPLICIT_DIFFUSION + active variables: [tocn, socn, ssh] + geometry: + mom6_input_nml: mom_input.nml + fields metadata: ./fields_metadata.yaml + group mapping: + - name: ocean + variables: [tocn, socn, socn, ssh] + - name: ice + variables: [cicen] + read: + groups: + - name: ocean + horizontal: + filename: hz_ocean.nc + vertical: + filename: vt_ocean.nc + - name: ice + horizontal: + filename: hz_ice.nc + linear variable change: input variables: [cicen, hicen, hsnon, socn, tocn, uocn, vocn, ssh] output variables: [cicen, hicen, hsnon, socn, tocn, uocn, vocn, ssh] linear variable changes: - - linear variable change name: HorizFiltSOCA - niter: 5 - filter variables: [cicen, socn, tocn, ssh] - - - linear variable change name: VertConvSOCA - Lz_min: 10.0 - Lz_mld: 0 - Lz_mld_max: 1.0 - scale_layer_thick: 5 - - linear variable change name: BkgErrFILT ocean_depth_min: 500 # [m] rescale_bkgerr: 1.0 diff --git 
a/parm/soca/berror/soca_parameters_diffusion_hz.yaml b/parm/soca/berror/soca_parameters_diffusion_hz.yaml new file mode 100644 index 000000000..9c2de874c --- /dev/null +++ b/parm/soca/berror/soca_parameters_diffusion_hz.yaml @@ -0,0 +1,37 @@ +geometry: &geom + mom6_input_nml: mom_input.nml + fields metadata: ./fields_metadata.yaml + +background: + read_from_file: 1 + basename: ./INPUT/ + ocn_filename: MOM.res.nc + ice_filename: cice.res.nc + date: '{{ATM_WINDOW_BEGIN}}' + state variables: [cicen, hicen, hsnon, socn, tocn, uocn, vocn, ssh] + +background error: + covariance model: SABER + saber central block: + saber block name: EXPLICIT_DIFFUSION + geometry: *geom + calibration: + normalization: + method: randomization + iterations: 10000 + + groups: + - name: ocean + horizontal: + from file: + filename: ocn.cor_rh.incr.0001-01-01T00:00:00Z.nc + variable name: ave_ssh + write: + filename: hz_ocean.nc + + - name: ice + horizontal: + as gaussian: true + fixed value: 50000.0 + write: + filename: hz_ice.nc diff --git a/parm/soca/berror/soca_parameters_diffusion_vt.yaml b/parm/soca/berror/soca_parameters_diffusion_vt.yaml new file mode 100644 index 000000000..8597bfe15 --- /dev/null +++ b/parm/soca/berror/soca_parameters_diffusion_vt.yaml @@ -0,0 +1,30 @@ +geometry: &geom + mom6_input_nml: mom_input.nml + fields metadata: ./fields_metadata.yaml + +background: + read_from_file: 1 + basename: ./INPUT/ + ocn_filename: MOM.res.nc + ice_filename: cice.res.nc + date: '{{ATM_WINDOW_BEGIN}}' + state variables: [cicen, hicen, hsnon, socn, tocn, uocn, vocn, ssh] + +background error: + covariance model: SABER + saber central block: + saber block name: EXPLICIT_DIFFUSION + geometry: *geom + calibration: + normalization: + # NOTE, not actually used here, since the normalization spec is only used for hz + method: randomization #< other option is "brute force" + iterations: 1000 #< in the real world you'll want to use 1e4 or so + + groups: + - name: vt_ocean + vertical: + as gaussian: 
true + fixed value: 5.0 + write: + filename: vt_ocean.nc diff --git a/parm/soca/berror/soca_setcorscales.yaml b/parm/soca/berror/soca_setcorscales.yaml index a06581ee3..0a91777a1 100644 --- a/parm/soca/berror/soca_setcorscales.yaml +++ b/parm/soca/berror/soca_setcorscales.yaml @@ -4,37 +4,13 @@ resolution: date: 0001-01-01T00:00:00Z -corr variables: [socn, tocn, ssh, uocn, vocn, cicen, hicen, hsnon] +corr variables: [ssh] scales: vert layers: 5 # in units of layer - socn: - rossby mult: 1.00 - min grid mult: 2.0 - tocn: - rossby mult: 1.00 - min grid mult: 2.0 - uocn: - rossby mult: 1.00 - min grid mult: 2.0 - vocn: - rossby mult: 1.00 - min grid mult: 2.0 ssh: rossby mult: 1.00 min grid mult: 2.0 - cicen: - rossby mult: 0.0 - min grid mult: 2.0 - min: 50.0 - hicen: - rossby mult: 0.0 - min grid mult: 4.0 - min: 150000.0 - hsnon: - rossby mult: 0.0 - min grid mult: 4.0 - min: 150000.0 rh output: datadir: ./ diff --git a/scripts/exgdas_global_marine_analysis_bmat.sh b/scripts/exgdas_global_marine_analysis_bmat.sh index 6afaa27e9..f75315bb3 100755 --- a/scripts/exgdas_global_marine_analysis_bmat.sh +++ b/scripts/exgdas_global_marine_analysis_bmat.sh @@ -86,6 +86,30 @@ if [ $err -gt 0 ]; then exit $err fi +################################################################################ +# Set decorrelation scales for the static B +$APRUN_OCNANAL $JEDI_BIN/soca_setcorscales.x soca_setcorscales.yaml +export err=$?; err_chk +if [ $err -gt 0 ]; then + exit $err +fi + +################################################################################ +# Initialize diffusion blocks +clean_yaml soca_parameters_diffusion_hz.yaml +$APRUN_OCNANAL $JEDI_BIN/soca_error_covariance_toolbox.x soca_parameters_diffusion_hz.yaml +export err=$?; err_chk +if [ $err -gt 0 ]; then + exit $err +fi + +clean_yaml soca_parameters_diffusion_vt.yaml +$APRUN_OCNANAL $JEDI_BIN/soca_error_covariance_toolbox.x soca_parameters_diffusion_vt.yaml +export err=$?; err_chk +if [ $err -gt 0 ]; then + exit 
$err +fi + ################################################################################ # Correlation and Localization operators shopt -s nullglob diff --git a/scripts/exgdas_global_marine_analysis_prep.py b/scripts/exgdas_global_marine_analysis_prep.py index d2cda4b1b..4829ffcf4 100755 --- a/scripts/exgdas_global_marine_analysis_prep.py +++ b/scripts/exgdas_global_marine_analysis_prep.py @@ -85,9 +85,10 @@ def cice_hist2fms(input_filename, output_filename): """ Simple reformatting utility to allow soca/fms to read CICE's history """ + input_filename_real = os.path.realpath(input_filename) # open the CICE history file - ds = xr.open_dataset(input_filename) + ds = xr.open_dataset(input_filename_real) if 'aicen' in ds.variables and 'hicen' in ds.variables and 'hsnon' in ds.variables: logging.info(f"*** Already reformatted, skipping.") @@ -100,7 +101,8 @@ def cice_hist2fms(input_filename, output_filename): ds = ds.rename({'aice_h': 'aicen', 'hi_h': 'hicen', 'hs_h': 'hsnon'}) # Save the new netCDF file - ds.to_netcdf(output_filename, mode='w') + output_filename_real = os.path.realpath(output_filename) + ds.to_netcdf(output_filename_real, mode='w') def test_hist_date(histfile, ref_date): @@ -278,7 +280,7 @@ def find_clim_ens(input_date): for obs_file in obs_files: logging.info(f"******* {obs_file}") obs_src = os.path.join(os.getenv('COM_OBS'), obs_file) - obs_dst = os.path.join(os.path.abspath(obs_in), obs_file) + obs_dst = os.path.join(os.path.realpath(obs_in), obs_file) logging.info(f"******* {obs_src}") if os.path.exists(obs_src): logging.info(f"******* fetching {obs_file}") @@ -305,20 +307,21 @@ def find_clim_ens(input_date): for mem in range(1, nmem_ens+1): for domain in ['ocn', 'ice']: # TODO(Guillaume): make use and define ensemble COM in the j-job + ensroot = os.getenv('COM_OCEAN_HISTORY_PREV') ensdir = os.path.join(os.getenv('COM_OCEAN_HISTORY_PREV'), '..', '..', '..', '..', '..', f'enkf{RUN}.{PDY}', f'{gcyc}', f'mem{str(mem).zfill(3)}', 'model_data', 
longname[domain], 'history') - ensdir = os.path.normpath(ensdir) + ensdir_real = os.path.realpath(ensdir) f009 = f'enkfgdas.t{gcyc}z.{domain}f009.nc' - fname_in = os.path.abspath(os.path.join(ensdir, f009)) - fname_out = os.path.abspath(os.path.join(static_ens, domain+"."+str(mem)+".nc")) + fname_in = os.path.abspath(os.path.join(ensdir_real, f009)) + fname_out = os.path.realpath(os.path.join(static_ens, domain+"."+str(mem)+".nc")) ens_member_list.append([fname_in, fname_out]) FileHandler({'copy': ens_member_list}).sync() # reformat the cice history output for mem in range(1, nmem_ens+1): - cice_fname = os.path.abspath(os.path.join(static_ens, "ice."+str(mem)+".nc")) + cice_fname = os.path.realpath(os.path.join(static_ens, "ice."+str(mem)+".nc")) cice_hist2fms(cice_fname, cice_fname) else: logging.info("---------------- Stage offline ensemble members") @@ -343,8 +346,8 @@ def find_clim_ens(input_date): # copy yaml for grid generation logging.info(f"---------------- generate gridgen.yaml") -gridgen_yaml_src = os.path.abspath(os.path.join(gdas_home, 'parm', 'soca', 'gridgen', 'gridgen.yaml')) -gridgen_yaml_dst = os.path.abspath(os.path.join(stage_cfg['stage_dir'], 'gridgen.yaml')) +gridgen_yaml_src = os.path.realpath(os.path.join(gdas_home, 'parm', 'soca', 'gridgen', 'gridgen.yaml')) +gridgen_yaml_dst = os.path.realpath(os.path.join(stage_cfg['stage_dir'], 'gridgen.yaml')) FileHandler({'copy': [[gridgen_yaml_src, gridgen_yaml_dst]]}).sync() ################################################################################ @@ -373,6 +376,33 @@ def find_clim_ens(input_date): locscales_yaml_dst = os.path.join(stage_cfg['stage_dir'], 'soca_setlocscales.yaml') FileHandler({'copy': [[locscales_yaml_src, locscales_yaml_dst]]}).sync() +################################################################################ +# copy yaml for correlation length scales + +logging.info(f"---------------- generate soca_setcorscales.yaml") +corscales_yaml_src = os.path.join(gdas_home, 
'parm', 'soca', 'berror', 'soca_setcorscales.yaml') +corscales_yaml_dst = os.path.join(stage_cfg['stage_dir'], 'soca_setcorscales.yaml') +FileHandler({'copy': [[corscales_yaml_src, corscales_yaml_dst]]}).sync() + +################################################################################ +# copy yaml for diffusion initialization + +logging.info(f"---------------- generate soca_parameters_diffusion_hz.yaml") +diffu_hz_yaml = os.path.join(anl_dir, 'soca_parameters_diffusion_hz.yaml') +diffu_hz_yaml_dir = os.path.join(gdas_home, 'parm', 'soca', 'berror') +diffu_hz_yaml_template = os.path.join(berror_yaml_dir, 'soca_parameters_diffusion_hz.yaml') +config = YAMLFile(path=diffu_hz_yaml_template) +config = Template.substitute_structure(config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig.get) +config.save(diffu_hz_yaml) + +logging.info(f"---------------- generate soca_parameters_diffusion_vt.yaml") +diffu_vt_yaml = os.path.join(anl_dir, 'soca_parameters_diffusion_vt.yaml') +diffu_vt_yaml_dir = os.path.join(gdas_home, 'parm', 'soca', 'berror') +diffu_vt_yaml_template = os.path.join(berror_yaml_dir, 'soca_parameters_diffusion_vt.yaml') +config = YAMLFile(path=diffu_vt_yaml_template) +config = Template.substitute_structure(config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig.get) +config.save(diffu_vt_yaml) + ################################################################################ # generate yaml for bump/nicas (used for correlation and/or localization) diff --git a/scripts/exgdas_global_marine_analysis_vrfy.py b/scripts/exgdas_global_marine_analysis_vrfy.py index 6179c00e7..11a9694d9 100755 --- a/scripts/exgdas_global_marine_analysis_vrfy.py +++ b/scripts/exgdas_global_marine_analysis_vrfy.py @@ -26,9 +26,9 @@ import subprocess from datetime import datetime, timedelta -comout = os.getenv('COM_OCEAN_ANALYSIS') -com_ice_history = os.getenv('COM_ICE_HISTORY_PREV') -com_ocean_history = os.getenv('COM_OCEAN_HISTORY_PREV') +comout = 
os.path.realpath(os.getenv('COM_OCEAN_ANALYSIS')) +com_ice_history = os.path.realpath(os.getenv('COM_ICE_HISTORY_PREV')) +com_ocean_history = os.path.realpath(os.getenv('COM_OCEAN_HISTORY_PREV')) cyc = os.getenv('cyc') RUN = os.getenv('RUN') gcyc = str((int(cyc) - 6) % 24).zfill(2) diff --git a/scripts/exglobal_prep_ocean_obs.py b/scripts/exglobal_prep_ocean_obs.py index c35689ead..437f18234 100755 --- a/scripts/exglobal_prep_ocean_obs.py +++ b/scripts/exglobal_prep_ocean_obs.py @@ -4,7 +4,7 @@ from datetime import datetime, timedelta import logging import os -import prep_marine_obs +from soca import prep_marine_obs import subprocess from wxflow import YAMLFile, save_as_yaml, FileHandler diff --git a/sorc/CMakeLists.txt b/sorc/CMakeLists.txt new file mode 100644 index 000000000..435ca013b --- /dev/null +++ b/sorc/CMakeLists.txt @@ -0,0 +1,137 @@ +# ------------------------------------------------------------------------- # +# JEDI GDAS Bundle # +# ------------------------------------------------------------------------- # + +# Check for minimim cmake requirement +cmake_minimum_required( VERSION 3.20 FATAL_ERROR ) + +find_package(ecbuild 3.5 REQUIRED HINTS ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/../ecbuild) + +project(GDAS-bundle VERSION 1.0.0 LANGUAGES C CXX Fortran ) + +include(GNUInstallDirs) +enable_testing() + +# Build type. +if(NOT CMAKE_BUILD_TYPE MATCHES "^(Debug|Release|RelWithDebInfo|MinSizeRel)$") + message(STATUS "Setting build type to 'Release' as none was specified.") + set(CMAKE_BUILD_TYPE + "Release" + CACHE STRING "Choose the type of build." FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" + "MinSizeRel" "RelWithDebInfo") +endif() + +# Find dependencies. +find_package(Python3 REQUIRED COMPONENTS Interpreter) + +# Include ecbuild_bundle macro +include( ecbuild_bundle ) + +# Enable MPI +set( ENABLE_MPI ON CACHE BOOL "Compile with MPI" ) + +# Handle user options. 
+option(BUILD_GDASBUNDLE "Build GDAS Bundle" ON) +option(CLONE_JCSDADATA "Clone JCSDA test data repositories" OFF) +option(WORKFLOW_TESTS "Include global-workflow dependent tests" OFF) + +# Initialize bundle +# ----------------- +ecbuild_bundle_initialize() + +# Build bundle source code. +if(BUILD_GDASBUNDLE) + +# jedi-cmake + ecbuild_bundle( PROJECT jedicmake SOURCE "./jedicmake" ) + include( jedicmake/cmake/Functions/git_functions.cmake ) + +# ECMWF libraries + option("BUNDLE_SKIP_ECKIT" "Don't build eckit" "ON" ) # Skip eckit build unless user passes -DBUNDLE_SKIP_ECKIT=OFF + option("BUNDLE_SKIP_FCKIT" "Don't build fckit" "ON") # Skip fckit build unless user passes -DBUNDLE_SKIP_FCKIT=OFF + option("BUNDLE_SKIP_ATLAS" "Don't build atlas" "ON") # Skip atlas build unless user passes -DBUNDLE_SKIP_ATLAS=OFF + +# turn off optional OOPS toy models + option( ENABLE_LORENZ95_MODEL "Build LORENZ95 toy model" OFF ) + option( ENABLE_QG_MODEL "Build QG toy model" OFF ) + + ecbuild_bundle( PROJECT eckit GIT "https://github.com/ecmwf/eckit.git" TAG 1.16.0 ) + ecbuild_bundle( PROJECT fckit GIT "https://github.com/ecmwf/fckit.git" TAG 0.9.2 ) + ecbuild_bundle( PROJECT atlas GIT "https://github.com/ecmwf/atlas.git" TAG 0.35.0 ) + +# External (required) observation operators + # TODO remove the CRTM from here and use it as a library + #option("BUNDLE_SKIP_CRTM" "Don't build CRTM" "OFF") # Don't build crtm unless user passes -DBUNDLE_SKIP_CRTM=OFF + #ecbuild_bundle( PROJECT crtm GIT "https://github.com/JCSDA/crtm.git" TAG v2.4.1-jedi.1 ) + ecbuild_bundle( PROJECT crtm SOURCE "./crtm" ) + +# Build GSI-B + option(BUILD_GSIBEC "Build GSI-B" OFF) + if(BUILD_GSIBEC) + ecbuild_bundle( PROJECT gsibec SOURCE "./gsibec" ) + endif() + +# Gibbs seawater + ecbuild_bundle( PROJECT gsw SOURCE "./gsw" ) + +# Core JEDI repositories + ecbuild_bundle( PROJECT oops SOURCE "./oops" ) + ecbuild_bundle( PROJECT vader SOURCE "./vader" ) + ecbuild_bundle( PROJECT saber SOURCE "./saber" ) + 
option(ENABLE_IODA_DATA "Obtain ioda test data from ioda-data repository (vs tarball)" ON) + ecbuild_bundle( PROJECT ioda SOURCE "./ioda" ) + option(ENABLE_UFO_DATA "Obtain ufo test data from ufo-data repository (vs tarball)" ON) + ecbuild_bundle( PROJECT ufo SOURCE "./ufo" ) + +# FMS and FV3 dynamical core + ecbuild_bundle( PROJECT fms SOURCE "./fms" ) + ecbuild_bundle( PROJECT fv3 SOURCE "./fv3" ) + +# fv3-jedi and associated repositories + ecbuild_bundle( PROJECT femps SOURCE "./femps" ) + ecbuild_bundle( PROJECT fv3-jedi-lm SOURCE "./fv3-jedi-lm" ) + option(ENABLE_FV3_JEDI_DATA "Obtain fv3-jedi test data from fv3-jedi-data repository (vs tarball)" ON) + ecbuild_bundle( PROJECT fv3-jedi SOURCE "./fv3-jedi" ) + +# SOCA associated repositories +# TODO: Move the Icepack fork to EMC github + set(BUILD_ICEPACK "ON" CACHE STRING "Build the icepack library") + if ( BUILD_ICEPACK ) + ecbuild_bundle( PROJECT icepack SOURCE "./icepack" ) + endif() + ecbuild_bundle( PROJECT mom6 SOURCE "./mom6" ) + ecbuild_bundle( PROJECT soca SOURCE "./soca" ) + + # Build JEDI/DA or other peripherals + ecbuild_bundle( PROJECT gdas-utils SOURCE "../utils" ) + +# Build IODA converters + option(BUILD_IODA_CONVERTERS "Build IODA Converters" ON) + if(BUILD_IODA_CONVERTERS) + ecbuild_bundle( PROJECT iodaconv SOURCE "./iodaconv" ) + endif() + +# Land associated repositories + ecbuild_bundle( PROJECT land-imsproc SOURCE "./land-imsproc" ) + ecbuild_bundle( PROJECT land-jediincr SOURCE "./land-jediincr" ) + +# GDASApp + ecbuild_bundle( PROJECT gdas SOURCE "../") + +# ioda, ufo, fv3-jedi, and saber test data +#--------------------------------- + if(CLONE_JCSDADATA) + + set(JCSDA_DATA_ROOT "$ENV{GDASAPP_TESTDATA}/jcsda") + ecbuild_bundle( PROJECT ioda-data SOURCE "${JCSDA_DATA_ROOT}/ioda-data" ) + ecbuild_bundle( PROJECT ufo-data SOURCE "${JCSDA_DATA_ROOT}/ufo-data" ) + ecbuild_bundle( PROJECT fv3-jedi-data SOURCE "${JCSDA_DATA_ROOT}/fv3-jedi-data" ) + + endif(CLONE_JCSDADATA) + 
+endif(BUILD_GDASBUNDLE) + +# Finalize bundle +# --------------- +ecbuild_bundle_finalize() diff --git a/sorc/crtm b/sorc/crtm new file mode 160000 index 000000000..bb7adbfc4 --- /dev/null +++ b/sorc/crtm @@ -0,0 +1 @@ +Subproject commit bb7adbfc4f3deadedac98743bcb4e18a2bca11f9 diff --git a/sorc/femps b/sorc/femps new file mode 160000 index 000000000..cb396811e --- /dev/null +++ b/sorc/femps @@ -0,0 +1 @@ +Subproject commit cb396811eb26380478c4d3f177d95096ed2ddd8f diff --git a/sorc/fms b/sorc/fms new file mode 160000 index 000000000..1f739141e --- /dev/null +++ b/sorc/fms @@ -0,0 +1 @@ +Subproject commit 1f739141ef8b000a0bd75ae8bebfadea340299ba diff --git a/sorc/fv3 b/sorc/fv3 new file mode 160000 index 000000000..61450b4e3 --- /dev/null +++ b/sorc/fv3 @@ -0,0 +1 @@ +Subproject commit 61450b4e3e80bb96b26c5f3808ce60b5e5cb4207 diff --git a/sorc/fv3-jedi b/sorc/fv3-jedi new file mode 160000 index 000000000..b4bc84192 --- /dev/null +++ b/sorc/fv3-jedi @@ -0,0 +1 @@ +Subproject commit b4bc841922609c28457e0899a397d001508a34de diff --git a/sorc/fv3-jedi-lm b/sorc/fv3-jedi-lm new file mode 160000 index 000000000..05cc1ae63 --- /dev/null +++ b/sorc/fv3-jedi-lm @@ -0,0 +1 @@ +Subproject commit 05cc1ae63252ca535f3db0fdca9a8a996329fc8f diff --git a/sorc/gsibec b/sorc/gsibec new file mode 160000 index 000000000..17823aef7 --- /dev/null +++ b/sorc/gsibec @@ -0,0 +1 @@ +Subproject commit 17823aef706e5d69e373e4c4c5dfadac99d5b800 diff --git a/sorc/gsw b/sorc/gsw new file mode 160000 index 000000000..1a02ebaf6 --- /dev/null +++ b/sorc/gsw @@ -0,0 +1 @@ +Subproject commit 1a02ebaf6f7a4e9f2c2d2dd973fb050e697bcc74 diff --git a/sorc/icepack b/sorc/icepack new file mode 160000 index 000000000..73136ee8d --- /dev/null +++ b/sorc/icepack @@ -0,0 +1 @@ +Subproject commit 73136ee8dcdbe378821e540488a5980a03d8abe6 diff --git a/sorc/ioda b/sorc/ioda new file mode 160000 index 000000000..f395e1d33 --- /dev/null +++ b/sorc/ioda @@ -0,0 +1 @@ +Subproject commit 
f395e1d33b3c6ce7b970da25f589d5fd92f5197b diff --git a/sorc/iodaconv b/sorc/iodaconv new file mode 160000 index 000000000..01adb65f6 --- /dev/null +++ b/sorc/iodaconv @@ -0,0 +1 @@ +Subproject commit 01adb65f60dcbb7e83b50d25a82d81e915bdcb6d diff --git a/sorc/jedicmake b/sorc/jedicmake new file mode 160000 index 000000000..36fc99bdf --- /dev/null +++ b/sorc/jedicmake @@ -0,0 +1 @@ +Subproject commit 36fc99bdff5d3d8835480b37a3dcc75e5f8da256 diff --git a/sorc/land-imsproc b/sorc/land-imsproc new file mode 160000 index 000000000..6373819ca --- /dev/null +++ b/sorc/land-imsproc @@ -0,0 +1 @@ +Subproject commit 6373819ca034d66523cb7852cd7b1b66f3f8ae07 diff --git a/sorc/land-jediincr b/sorc/land-jediincr new file mode 160000 index 000000000..2923344b3 --- /dev/null +++ b/sorc/land-jediincr @@ -0,0 +1 @@ +Subproject commit 2923344b33511d80c685c30261ad37896eb50768 diff --git a/sorc/mom6 b/sorc/mom6 new file mode 160000 index 000000000..51ec489ad --- /dev/null +++ b/sorc/mom6 @@ -0,0 +1 @@ +Subproject commit 51ec489ad7d8a86762bef4c46eabd9af5fc41fa4 diff --git a/sorc/oops b/sorc/oops new file mode 160000 index 000000000..60f93924f --- /dev/null +++ b/sorc/oops @@ -0,0 +1 @@ +Subproject commit 60f93924fe446714fcb04d96f6930a760db74b23 diff --git a/sorc/saber b/sorc/saber new file mode 160000 index 000000000..4f1956c38 --- /dev/null +++ b/sorc/saber @@ -0,0 +1 @@ +Subproject commit 4f1956c38267200feff91a8e4231e82df3885637 diff --git a/sorc/soca b/sorc/soca new file mode 160000 index 000000000..5783fd72a --- /dev/null +++ b/sorc/soca @@ -0,0 +1 @@ +Subproject commit 5783fd72ace301b07a9c264595c82c31e7e872b6 diff --git a/sorc/ufo b/sorc/ufo new file mode 160000 index 000000000..e41a1c928 --- /dev/null +++ b/sorc/ufo @@ -0,0 +1 @@ +Subproject commit e41a1c928150944795ed1276c84c4f1e37a47c99 diff --git a/sorc/vader b/sorc/vader new file mode 160000 index 000000000..2baeb8dfa --- /dev/null +++ b/sorc/vader @@ -0,0 +1 @@ +Subproject commit 2baeb8dfa8781a67bcf386bf152f02619b748298 diff 
--git a/test/aero/global-workflow/setup_workflow_exp.sh b/test/aero/global-workflow/setup_workflow_exp.sh index 288021a8d..b9f495cc0 100755 --- a/test/aero/global-workflow/setup_workflow_exp.sh +++ b/test/aero/global-workflow/setup_workflow_exp.sh @@ -10,16 +10,16 @@ edate=2021032318 app=ATMA starttype='warm' gfscyc='4' -resdet='48' -resens='48' +resdetatmos='48' +resensatmos='48' nens=0 pslot='gdas_test' configdir=$srcdir/../../parm/config/gfs -comrot=$bindir/test/aero/global-workflow/testrun/ROTDIRS +comroot=$bindir/test/aero/global-workflow/testrun/ROTDIRS expdir=$bindir/test/aero/global-workflow/testrun/experiments # clean previous experiment -rm -rf $comrot $expdir config +rm -rf $comroot $expdir config # copy config.yaml to local config cp -r $configdir config @@ -41,12 +41,12 @@ $srcdir/../../workflow/setup_expt.py gfs cycled --idate $idate \ --app $app \ --start $starttype \ --gfs_cyc $gfscyc \ - --resdet $resdet \ - --resens $resens \ + --resdetatmos $resdetatmos \ + --resensatmos $resensatmos \ --nens $nens \ --pslot $pslot \ --configdir $expdir/../config \ - --comrot $comrot \ + --comroot $comroot \ --expdir $expdir \ --yaml $expdir/../config.yaml diff --git a/test/atm/CMakeLists.txt b/test/atm/CMakeLists.txt index 6dd33d72d..d660cf599 100644 --- a/test/atm/CMakeLists.txt +++ b/test/atm/CMakeLists.txt @@ -4,10 +4,10 @@ if (BUILD_GDASBUNDLE) # link input file from iodaconv to test directory - file(CREATE_LINK ${PROJECT_SOURCE_DIR}/iodaconv/test/testinput/gdas.t06z.adpsfc.tm00.bufr_d ${PROJECT_BINARY_DIR}/test/testdata/gdas.t06z.adpsfc.tm00.bufr_d SYMBOLIC) + file(CREATE_LINK ${CMAKE_SOURCE_DIR}/iodaconv/test/testinput/gdas.t06z.adpsfc.tm00.bufr_d ${PROJECT_BINARY_DIR}/test/testdata/gdas.t06z.adpsfc.tm00.bufr_d SYMBOLIC) # test convert BUFR to IODA add_test(NAME test_gdasapp_convert_bufr_adpsfc - COMMAND ${PROJECT_BINARY_DIR}/bin/bufr2ioda.x ${PROJECT_BINARY_DIR}/test/testinput/bufr_adpsfc.yaml + COMMAND ${CMAKE_BINARY_DIR}/bin/bufr2ioda.x 
${PROJECT_BINARY_DIR}/test/testinput/bufr_adpsfc.yaml WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) # test generate UFO CRTM sat bias files from GSI sat bias files diff --git a/test/atm/global-workflow/config.atmanl b/test/atm/global-workflow/config.atmanl index 147a042ae..a2660e055 100755 --- a/test/atm/global-workflow/config.atmanl +++ b/test/atm/global-workflow/config.atmanl @@ -8,7 +8,7 @@ echo "BEGIN: config.atmanl" export CASE_ANL="@ATMRES_ANL@" export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ export OBS_LIST=@OBS_LIST@ -export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_drpcg.yaml export STATICB_TYPE="identity" export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml export INTERP_METHOD='barycentric' diff --git a/test/atm/global-workflow/config.yaml b/test/atm/global-workflow/config.yaml index 430d73eda..ec37c4f79 100644 --- a/test/atm/global-workflow/config.yaml +++ b/test/atm/global-workflow/config.yaml @@ -1,4 +1,5 @@ base: + HOMEgfs: "@topdir@" DO_JEDIATMVAR: "YES" DO_JEDIATMENS: "YES" DATAPATH: "@bindir@/test/atm/global-workflow/testrun" diff --git a/test/atm/global-workflow/jjob_ens_final.sh b/test/atm/global-workflow/jjob_ens_final.sh index ae1928a18..f7002dd60 100755 --- a/test/atm/global-workflow/jjob_ens_final.sh +++ b/test/atm/global-workflow/jjob_ens_final.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs -export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." 
&& pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -25,7 +26,7 @@ export NMEM_ENS=3 export ACCOUNT=da-cpu # Set python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH @@ -33,11 +34,11 @@ export PYTHONPATH machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') # Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then NETCDF=$( which ncdump ) export NETCDF export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then ncdump=$( which ncdump ) NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) export NETCDF @@ -45,10 +46,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE fi diff --git a/test/atm/global-workflow/jjob_ens_init.sh b/test/atm/global-workflow/jjob_ens_init.sh index 8966f2b69..2e5cefc66 100755 --- a/test/atm/global-workflow/jjob_ens_init.sh +++ b/test/atm/global-workflow/jjob_ens_init.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs -export 
HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -30,7 +31,7 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/parm/config/gfs/config.com" # Set python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH @@ -38,11 +39,11 @@ export PYTHONPATH machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') # Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then NETCDF=$( which ncdump ) export NETCDF export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then ncdump=$( which ncdump ) NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) export NETCDF @@ -69,7 +70,7 @@ dpath=gdas.$PDY/$cyc/obs mkdir -p $COM_OBS flist="amsua_n19.$CDATE.nc4 sondes.$CDATE.nc4" for file in $flist; do - ln -fs $GDASAPP_TESTDATA/lowres/$dpath/${oprefix}.$file $COM_OBS/ + ln -fs $GDASAPP_TESTDATA/lowres/$dpath/${oprefix}.$file $COM_OBS/${oprefix}.$file done # Link radiance bias correction files @@ -77,7 +78,7 @@ dpath=gdas.$gPDY/$gcyc/analysis/atmos mkdir -p $COM_ATMOS_ANALYSIS_PREV flist="amsua_n19.satbias.nc4 amsua_n19.satbias_cov.nc4 amsua_n19.tlapse.txt" for file in $flist; do - ln -fs $GDASAPP_TESTDATA/lowres/$dpath/$gprefix.$file $COM_ATMOS_ANALYSIS_PREV/ + ln -fs $GDASAPP_TESTDATA/lowres/$dpath/$gprefix.$file $COM_ATMOS_ANALYSIS_PREV/$gprefix.$file done # Link member atmospheric background on tiles and atmf006 @@ -104,10 +105,10 @@ for imem in $(seq 1 $NMEM_ENS); do done # Execute j-job -if 
[ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE fi diff --git a/test/atm/global-workflow/jjob_ens_run.sh b/test/atm/global-workflow/jjob_ens_run.sh index 597afcd43..6844e6973 100755 --- a/test/atm/global-workflow/jjob_ens_run.sh +++ b/test/atm/global-workflow/jjob_ens_run.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs -export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." 
&& pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -28,7 +29,7 @@ export NMEM_ENS=3 export ACCOUNT=da-cpu # Set python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH @@ -36,11 +37,11 @@ export PYTHONPATH machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') # Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then NETCDF=$( which ncdump ) export NETCDF export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then ncdump=$( which ncdump ) NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) export NETCDF @@ -48,10 +49,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then + sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN -elif [ $machine = 'ORION' ]; then - sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --partition=orion --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN else ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN fi diff --git a/test/atm/global-workflow/jjob_var_final.sh b/test/atm/global-workflow/jjob_var_final.sh index f9b64afbe..74d46cd98 100755 --- a/test/atm/global-workflow/jjob_var_final.sh +++ b/test/atm/global-workflow/jjob_var_final.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs 
-export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -25,7 +26,7 @@ export NMEM_ENS=0 export ACCOUNT=da-cpu # Set python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH @@ -33,11 +34,11 @@ export PYTHONPATH machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') # Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then NETCDF=$( which ncdump ) export NETCDF export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then ncdump=$( which ncdump ) NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) export NETCDF @@ -45,10 +46,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE fi diff --git a/test/atm/global-workflow/jjob_var_init.sh b/test/atm/global-workflow/jjob_var_init.sh index 0d88c2174..32fc19537 100755 --- 
a/test/atm/global-workflow/jjob_var_init.sh +++ b/test/atm/global-workflow/jjob_var_init.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs -export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -30,7 +31,7 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/parm/config/gfs/config.com" # Set python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH @@ -38,11 +39,11 @@ export PYTHONPATH machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') # Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then NETCDF=$( which ncdump ) export NETCDF export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then ncdump=$( which ncdump ) NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) export NETCDF @@ -71,7 +72,7 @@ dpath=gdas.$PDY/$cyc/obs mkdir -p $COM_OBS flist="amsua_n19.$CDATE.nc4 sondes.$CDATE.nc4" for file in $flist; do - ln -fs $GDASAPP_TESTDATA/lowres/$dpath/${oprefix}.$file $COM_OBS/ + ln -fs $GDASAPP_TESTDATA/lowres/$dpath/${oprefix}.$file $COM_OBS/${oprefix}.$file done # Link radiance bias correction files @@ -79,7 +80,7 @@ dpath=gdas.$gPDY/$gcyc/analysis/atmos mkdir -p $COM_ATMOS_ANALYSIS_PREV flist="amsua_n19.satbias.nc4 amsua_n19.satbias_cov.nc4 amsua_n19.tlapse.txt" for file in $flist; do - ln -fs $GDASAPP_TESTDATA/lowres/$dpath/$gprefix.$file $COM_ATMOS_ANALYSIS_PREV/ + ln -fs $GDASAPP_TESTDATA/lowres/$dpath/$gprefix.$file 
$COM_ATMOS_ANALYSIS_PREV/$gprefix.$file done # Link atmospheric background on gaussian grid @@ -87,7 +88,7 @@ dpath=gdas.$gPDY/$gcyc/model_data/atmos/history mkdir -p $COM_ATMOS_HISTORY_PREV flist="atmf006.nc" for file in $flist; do - ln -fs $GDASAPP_TESTDATA/lowres/$dpath/${gprefix}.${file} $COM_ATMOS_HISTORY_PREV/ + ln -fs $GDASAPP_TESTDATA/lowres/$dpath/${gprefix}.${file} $COM_ATMOS_HISTORY_PREV/${gprefix}.${file} done # Link atmospheric bacgkround on tiles @@ -96,7 +97,7 @@ COM_ATMOS_RESTART_PREV_DIRNAME=$(dirname $COM_ATMOS_RESTART_PREV) mkdir -p $COM_ATMOS_RESTART_PREV_DIRNAME flist="restart" for file in $flist; do - ln -fs $GDASAPP_TESTDATA/lowres/$dpath/$file $COM_ATMOS_RESTART_PREV_DIRNAME/ + ln -fs $GDASAPP_TESTDATA/lowres/$dpath/$file $COM_ATMOS_RESTART_PREV_DIRNAME/$file done @@ -125,10 +126,10 @@ done # Execute j-job -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE fi diff --git a/test/atm/global-workflow/jjob_var_run.sh b/test/atm/global-workflow/jjob_var_run.sh index a78b1e10e..e7471bec1 100755 --- a/test/atm/global-workflow/jjob_var_run.sh +++ b/test/atm/global-workflow/jjob_var_run.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs -export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." 
&& pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -28,7 +29,7 @@ export NMEM_ENS=0 export ACCOUNT=da-cpu # Set python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH @@ -36,11 +37,11 @@ export PYTHONPATH machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') # Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then NETCDF=$( which ncdump ) export NETCDF export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then ncdump=$( which ncdump ) NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) export NETCDF @@ -48,10 +49,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' ]; then +if [[ $machine = 'HERA' ]]; then + sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN +elif [[ $machine = 'ORION' || $machine = 'HERCULES' ]]; then sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN else ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN fi diff --git a/test/atm/global-workflow/setup_workflow_exp.sh b/test/atm/global-workflow/setup_workflow_exp.sh index 7eb6751db..67bfd3b03 100755 --- a/test/atm/global-workflow/setup_workflow_exp.sh +++ b/test/atm/global-workflow/setup_workflow_exp.sh @@ -3,6 +3,7 @@ set -x # ctest to create an experiment directory for global-workflow 
bindir=$1 srcdir=$2 +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P) # test experiment variables idate=2021032312 @@ -10,16 +11,16 @@ edate=2021032318 app=ATM starttype='warm' gfscyc='4' -resdet='48' -resens='48' +resdetatmos='48' +resensatmos='48' nens=3 pslot='gdas_test' configdir=$srcdir/../../parm/config/gfs -comrot=$bindir/test/atm/global-workflow/testrun/ROTDIRS +comroot=$bindir/test/atm/global-workflow/testrun/ROTDIRS expdir=$bindir/test/atm/global-workflow/testrun/experiments # clean previous experiment -rm -rf $comrot $expdir config +rm -rf $comroot $expdir config # copy config.yaml to local config cp -r $configdir config @@ -27,26 +28,24 @@ cp $srcdir/test/atm/global-workflow/config.atmanl config/ cp $srcdir/test/atm/global-workflow/config.yaml . # update paths in config.yaml +sed -i -e "s~@topdir@~${topdir}~g" config.yaml sed -i -e "s~@bindir@~${bindir}~g" config.yaml sed -i -e "s~@srcdir@~${srcdir}~g" config.yaml sed -i -e "s~@dumpdir@~${GDASAPP_TESTDATA}/lowres~g" config.yaml # run the script -ln -sf $srcdir/../../workflow/setup_expt.py . 
- - echo "Running global-workflow experiment generation script" $srcdir/../../workflow/setup_expt.py gfs cycled --idate $idate \ --edate $edate \ --app $app \ --start $starttype \ --gfs_cyc $gfscyc \ - --resdet $resdet \ - --resens $resens \ + --resdetatmos $resdetatmos \ + --resensatmos $resensatmos \ --nens $nens \ --pslot $pslot \ --configdir $expdir/../config \ - --comrot $comrot \ + --comroot $comroot \ --expdir $expdir \ --yaml $expdir/../config.yaml diff --git a/test/land/CMakeLists.txt b/test/land/CMakeLists.txt index edaff5a9f..2f8879768 100644 --- a/test/land/CMakeLists.txt +++ b/test/land/CMakeLists.txt @@ -32,6 +32,6 @@ set_tests_properties(test_gdasapp_land_letkfoi_snowda # Test convert BUFR to IODA add_test(NAME test_gdasapp_convert_bufr_adpsfc_snow - COMMAND ${PROJECT_SOURCE_DIR}/test/land/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} bufr_adpsfc_snow + COMMAND ${PROJECT_SOURCE_DIR}/test/land/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${PROJECT_SOURCE_DIR} ${CMAKE_BINARY_DIR} bufr_adpsfc_snow WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) diff --git a/test/land/letkfoi_snowda.sh b/test/land/letkfoi_snowda.sh index 64086ba07..d21672dd9 100755 --- a/test/land/letkfoi_snowda.sh +++ b/test/land/letkfoi_snowda.sh @@ -66,7 +66,7 @@ fi mkdir -p Data diags mkdir -p Data/fieldmetadata ln -s ${project_source_dir}/parm/io/fv3jedi_fieldmetadata_restart.yaml Data/fieldmetadata/. 
-ln -s ${project_binary_dir}/fv3-jedi/test/Data/fv3files Data/fv3files +ln -s ${project_source_dir}/sorc/fv3-jedi/test/Data/fv3files Data/fv3files ln -s ${project_source_dir}/test/land/letkfoi_land.yaml letkf_land.yaml ln -s ${OBSDIR}/snow_depth/GTS/202103/adpsfc_snow_2021032318.nc4 adpsfc_snow.nc4 ln -s ${OBSDIR} Data/land diff --git a/test/land/test_bufr2ioda.sh b/test/land/test_bufr2ioda.sh index f21e30f2f..e4e3b6235 100755 --- a/test/land/test_bufr2ioda.sh +++ b/test/land/test_bufr2ioda.sh @@ -3,9 +3,11 @@ set -x PROJECT_BINARY_DIR=${1} -OBSSOURCE=${2} -CMD=${PROJECT_BINARY_DIR}/bin/bufr2ioda.x -OBSYAML=${PROJECT_BINARY_DIR}/../ush/land/${OBSSOURCE}.yaml +PROJECT_SOURCE_DIR=${2} +CMAKE_BINARY_DIR=${3} +OBSSOURCE=${4} +CMD=${CMAKE_BINARY_DIR}/bin/bufr2ioda.x +OBSYAML=${PROJECT_SOURCE_DIR}/ush/land/${OBSSOURCE}.yaml OUTFILE=`grep obsdataout ${OBSYAML} | cut -d'"' -f2` diff --git a/test/setup_workflow_exp.sh b/test/setup_workflow_exp.sh index 5f16b0ecd..3123a7570 100755 --- a/test/setup_workflow_exp.sh +++ b/test/setup_workflow_exp.sh @@ -9,21 +9,21 @@ edate=2021032418 app=ATM # NOTE make this S2SWA soon starttype='warm' gfscyc='4' -resdet='48' -resens='48' +resdetatmos='48' +resensatmos='48' nens=0 pslot='gdas_test' configdir=$srcdir/../../parm/config/gfs -comrot=$bindir/test/testrun/ROTDIRS +comroot=$bindir/test/testrun/ROTDIRS expdir=$bindir/test/testrun/experiments # clean previous experiment -rm -rf "${comrot}" "${expdir}" +rm -rf "${comroot}" "${expdir}" # run the script cd $srcdir/../../workflow echo "Running global-workflow experiment generation script" -./setup_expt.py gfs cycled --idate $idate --edate $edate --app $app --start $starttype --gfs_cyc $gfscyc --resdet $resdet --resens $resens --nens $nens --pslot $pslot --configdir $configdir --comrot $comrot --expdir $expdir +./setup_expt.py gfs cycled --idate $idate --edate $edate --app $app --start $starttype --gfs_cyc $gfscyc --resdetatmos $resdetatmos --resensatmos $resensatmos --nens $nens --pslot 
$pslot --configdir $configdir --comroot $comroot --expdir $expdir exit $? diff --git a/test/soca/CMakeLists.txt b/test/soca/CMakeLists.txt index 74139a3ca..65b4b1138 100644 --- a/test/soca/CMakeLists.txt +++ b/test/soca/CMakeLists.txt @@ -6,35 +6,35 @@ set( TESTDATA ${PROJECT_BINARY_DIR}/test/testdata ) # test convert BUFR to IODA add_test(NAME test_gdasapp_convert_bufr_temp_dbuoy - COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} temp_bufr_dbuoyprof + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} temp_bufr_dbuoyprof WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) add_test(NAME test_gdasapp_convert_bufr_salt_dbuoy - COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} salt_bufr_dbuoyprof + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} salt_bufr_dbuoyprof WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) add_test(NAME test_gdasapp_convert_bufr_temp_mbuoyb - COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} temp_bufr_mbuoybprof + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} temp_bufr_mbuoybprof WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) add_test(NAME test_gdasapp_convert_bufr_salt_mbuoyb - COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} salt_bufr_mbuoybprof + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} salt_bufr_mbuoybprof WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) add_test(NAME test_gdasapp_convert_bufr_tesacprof - COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} bufr_tesacprof + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} bufr_tesacprof WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) add_test(NAME test_gdasapp_convert_bufr_trkobprof - COMMAND 
${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} bufr_trkobprof + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} bufr_trkobprof WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) add_test(NAME test_gdasapp_convert_bufr_sfcships - COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} bufr_sfcships + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} bufr_sfcships WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) add_test(NAME test_gdasapp_convert_bufr_sfcshipsu - COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} bufr_sfcshipsu + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/test_bufr2ioda.sh ${PROJECT_BINARY_DIR} ${CMAKE_BINARY_DIR} bufr_sfcshipsu WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/) # Symlink test input yaml files diff --git a/test/soca/gw/CMakeLists.txt b/test/soca/gw/CMakeLists.txt index 2d7d059d8..bd6616744 100644 --- a/test/soca/gw/CMakeLists.txt +++ b/test/soca/gw/CMakeLists.txt @@ -1,7 +1,7 @@ # test for creating an experiment directory within the global-workflow file(MAKE_DIRECTORY ${PROJECT_BINARY_DIR}/test/soca/gw/testrun) -# Prepare COMROT and static dir +# Prepare ROTDIR and static dir add_test(NAME test_gdasapp_soca_prep COMMAND ${PROJECT_SOURCE_DIR}/test/soca/gw/prep.sh ${PROJECT_BINARY_DIR} ${PROJECT_SOURCE_DIR} WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/soca/gw) @@ -11,9 +11,14 @@ add_test(NAME test_gdasapp_soca_prep # Identify machine set(MACHINE "container") -IF (IS_DIRECTORY /work2/noaa/da) - set(MACHINE "orion") - set(PARTITION "orion") +IF (IS_DIRECTORY /work2) + IF (IS_DIRECTORY /apps/other) + set(MACHINE "hercules") + set(PARTITION "hercules") + ELSE() + set(MACHINE "orion") + set(PARTITION "orion") + ENDIF() ENDIF() IF (IS_DIRECTORY /scratch2/NCEPDEV/) set(MACHINE "hera") @@ -69,7 +74,7 @@ foreach(jjob ${jjob_list}) WORKING_DIRECTORY 
${PROJECT_BINARY_DIR}/test/soca/gw/testrun) set_tests_properties(${test_name} PROPERTIES - ENVIRONMENT "PYTHONPATH=${PROJECT_BINARY_DIR}/lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}:$ENV{PYTHONPATH}") + ENVIRONMENT "PYTHONPATH=${PROJECT_SOURCE_DIR}/ush:${PROJECT_BINARY_DIR}/../lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}:$ENV{PYTHONPATH}") set(setup "--skip") # Only run the setup of the first test, if not, it will hang @@ -80,7 +85,7 @@ endforeach() set(ctest_list "socahybridweights" "incr_handler" "ens_handler") foreach(ctest ${ctest_list}) set(TEST ${ctest}) - set(EXEC ${PROJECT_BINARY_DIR}/bin/gdas_${ctest}.x) + set(EXEC ${PROJECT_BINARY_DIR}/../bin/gdas_${ctest}.x) set(YAML ${PROJECT_SOURCE_DIR}/test/soca/testinput/${ctest}.yaml) configure_file(${PROJECT_SOURCE_DIR}/test/soca/gw/run_gdas_apps.yaml.test ${PROJECT_BINARY_DIR}/test/soca/gw/testrun/run_gdas_apps_${ctest}.yaml) @@ -93,5 +98,5 @@ foreach(ctest ${ctest_list}) set_tests_properties(${test_name} PROPERTIES - ENVIRONMENT "PYTHONPATH=${PROJECT_BINARY_DIR}/lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}:$ENV{PYTHONPATH}") + ENVIRONMENT "PYTHONPATH=${PROJECT_BINARY_DIR}/../lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}:$ENV{PYTHONPATH}") endforeach() diff --git a/test/soca/gw/prep.sh b/test/soca/gw/prep.sh index 2aeadf224..424881ef3 100755 --- a/test/soca/gw/prep.sh +++ b/test/soca/gw/prep.sh @@ -35,7 +35,7 @@ done for day in $(seq 1 2 9); do cp ${COM}/06/model_data/ocean/history/gdas.t06z.ocnf003.nc \ ${project_binary_dir}/soca_static/bkgerr/stddev/ocn.ensstddev.fc.2019-04-0${day}T00:00:00Z.PT0S.nc - cp ${project_source_dir}/soca/test/Data/72x35x25/ice.bkgerror.nc \ + cp ${project_source_dir}/sorc/soca/test/Data/72x35x25/ice.bkgerror.nc \ ${project_binary_dir}/soca_static/bkgerr/stddev/ice.ensstddev.fc.2019-04-0${day}T00:00:00Z.PT0S.nc done diff --git a/test/soca/gw/run_jjobs.yaml.test b/test/soca/gw/run_jjobs.yaml.test index a185cee2b..c4f4e12f3 100644 
--- a/test/soca/gw/run_jjobs.yaml.test +++ b/test/soca/gw/run_jjobs.yaml.test @@ -1,6 +1,6 @@ machine: @MACHINE@ -resdet: 48 -resens: 48 +resdetatmos: 48 +resensatmos: 48 jjobs: - @JJOB@ @@ -21,12 +21,12 @@ gw environement: assym_freq: 6 backgrounds: - COM_SRC: @HOMEgfs@/sorc/gdas.cd/build/test/soca/gw/COM + COM_SRC: @HOMEgfs@/sorc/gdas.cd/build/gdas/test/soca/gw/COM working directories: - ROTDIRS: @HOMEgfs@/sorc/gdas.cd/build/test/soca/gw/testrun/testjjobs/ROTDIRS - EXPDIRS: @HOMEgfs@/sorc/gdas.cd/build/test/soca/gw/testrun/testjjobs/experiments - STMP: @HOMEgfs@/sorc/gdas.cd/build/test/soca/gw/testrun/testjjobs + ROTDIRS: @HOMEgfs@/sorc/gdas.cd/build/gdas/test/soca/gw/testrun/testjjobs/ROTDIRS + EXPDIRS: @HOMEgfs@/sorc/gdas.cd/build/gdas/test/soca/gw/testrun/testjjobs/experiments + STMP: @HOMEgfs@/sorc/gdas.cd/build/gdas/test/soca/gw/testrun/testjjobs jedi: OOPS_TRACE: 1 @@ -46,7 +46,7 @@ setup_expt config: NMEM_ENS: "4" DOHYBVAR: "YES" ocnanal: - SOCA_INPUT_FIX_DIR: @HOMEgfs@/sorc/gdas.cd/build/soca_static + SOCA_INPUT_FIX_DIR: @HOMEgfs@/sorc/gdas.cd/build/gdas/soca_static CASE_ANL: C48 SOCA_OBS_LIST: @HOMEgfs@/sorc/gdas.cd/parm/soca/obs/obs_list.yaml SOCA_NINNER: 1 @@ -55,8 +55,8 @@ setup_expt config: NICAS_GRID_SIZE: 150 prepoceanobs: SOCA_OBS_LIST: @HOMEgfs@/sorc/gdas.cd/parm/soca/obs/obs_list.yaml - OBSPROC_CONFIG: @HOMEgfs@/sorc/gdas.cd/parm/soca/obsproc/obsproc_config.yaml - DMPDIR: @HOMEgfs@/sorc/gdas.cd/build/test/soca/gw/obsproc + OBSPROC_YAML: @HOMEgfs@/sorc/gdas.cd/parm/soca/obsproc/obsproc_config.yaml + DMPDIR: @HOMEgfs@/sorc/gdas.cd/build/gdas/test/soca/gw/obsproc job options: account: da-cpu diff --git a/test/soca/gw/static.sh b/test/soca/gw/static.sh index 5fc615a85..05407ea92 100755 --- a/test/soca/gw/static.sh +++ b/test/soca/gw/static.sh @@ -10,12 +10,12 @@ mkdir -p ${soca_static}/INPUT mkdir -p ${soca_static}/bump mkdir -p ${soca_static}/bkgerr/stddev -lowres=${project_source_dir}/soca/test/Data 
+lowres=${project_source_dir}/sorc/soca/test/Data cp -L ${lowres}/workdir/{diag_table,field_table} ${soca_static} cp -L ${project_source_dir}/test/soca/fix/MOM_input ${soca_static} cp -L ${lowres}/{fields_metadata.yml,godas_sst_bgerr.nc,rossrad.dat} ${soca_static} mv ${soca_static}/fields_metadata.yml ${soca_static}/fields_metadata.yaml -cp -L ${project_source_dir}/soca/test/testinput/obsop_name_map.yml ${soca_static}/obsop_name_map.yaml +cp -L ${project_source_dir}/sorc/soca/test/testinput/obsop_name_map.yml ${soca_static}/obsop_name_map.yaml cp -L ${lowres}/72x35x25/input.nml ${soca_static}/inputnml cp -L ${lowres}/72x35x25/INPUT/{hycom1_25.nc,ocean_mosaic.nc,grid_spec.nc,layer_coord25.nc,ocean_hgrid.nc,ocean_topog.nc} ${soca_static}/INPUT diff --git a/test/soca/test_bufr2ioda.sh b/test/soca/test_bufr2ioda.sh index 370e8639e..a6a9c0dbd 100755 --- a/test/soca/test_bufr2ioda.sh +++ b/test/soca/test_bufr2ioda.sh @@ -3,8 +3,9 @@ set -x PROJECT_BINARY_DIR=${1} -OBSSOURCE=${2} -CMD=${PROJECT_BINARY_DIR}/bin/bufr2ioda.x +CMAKE_BINARY_DIR=${2} +OBSSOURCE=${3} +CMD=${CMAKE_BINARY_DIR}/bin/bufr2ioda.x OBSYAML=${PROJECT_BINARY_DIR}/test/testinput/${OBSSOURCE}.yaml OUTFILE=`grep obsdataout ${OBSYAML} | cut -d'"' -f2` diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 949526019..8610af15c 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -25,6 +25,8 @@ case $(hostname -f) in Orion-login-[1-4].HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion1-4 + Hercules-login-[1-4].HPC.MsState.Edu) MACHINE_ID=hercules ;; ### hercules1-4 + cheyenne[1-6].cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 cheyenne[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 chadmin[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 diff --git a/ush/eva/marine_eva_post.py b/ush/eva/marine_eva_post.py index 9cdf01c90..a355621a1 100755 --- a/ush/eva/marine_eva_post.py +++ b/ush/eva/marine_eva_post.py @@ -29,7 +29,7 @@ def 
marine_eva_post(inputyaml, outputdir, diagdir): newfilename = os.path.join(diagdir, os.path.basename(filename)) newfilenames.append(newfilename) dataset['filenames'] = newfilenames - for graphic in input_yaml_dict['graphics']: + for graphic in input_yaml_dict['graphics']['figure_list']: # this assumes that there is only one variable, or that the # variables are all the same variable = graphic['batch figure']['variables'][0] diff --git a/ush/eva/marine_gdas_plots.yaml b/ush/eva/marine_gdas_plots.yaml index 78e9f6c17..9ad69d803 100644 --- a/ush/eva/marine_gdas_plots.yaml +++ b/ush/eva/marine_gdas_plots.yaml @@ -58,6 +58,9 @@ transforms: graphics: + plotting_backend: Emcpy + figure_list: + # ---------- Map Plots ---------- # Map plot of OmBQC # -------- diff --git a/ush/module-setup.sh b/ush/module-setup.sh index 469fd4a3c..ef7f73699 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -22,6 +22,13 @@ elif [[ $MACHINE_ID = orion* ]] ; then fi module purge +elif [[ $MACHINE_ID = hercules* ]] ; then + # We are on Hercules + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /apps/lmod/init/bash + fi + module purge + elif [[ $MACHINE_ID = s4* ]] ; then # We are on SSEC Wisconsin S4 if ( ! 
eval module help > /dev/null 2>&1 ) ; then diff --git a/ush/soca/examples/run_jjobs_container.yaml b/ush/soca/examples/run_jjobs_container.yaml index 13cf2a76c..92901fdc8 100644 --- a/ush/soca/examples/run_jjobs_container.yaml +++ b/ush/soca/examples/run_jjobs_container.yaml @@ -1,5 +1,5 @@ machine: container -resdet: 48 +resdetatmos: 48 jjobs: - JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP diff --git a/ush/soca/examples/run_jjobs_hera.yaml b/ush/soca/examples/run_jjobs_hera.yaml index b4bb5d448..6b2edb863 100644 --- a/ush/soca/examples/run_jjobs_hera.yaml +++ b/ush/soca/examples/run_jjobs_hera.yaml @@ -1,5 +1,5 @@ machine: hera -resdet: 48 +resdetatmos: 48 jjobs: - JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP diff --git a/ush/soca/examples/run_jjobs_hera_025.yaml b/ush/soca/examples/run_jjobs_hera_025.yaml index ba2552ef9..5686af150 100644 --- a/ush/soca/examples/run_jjobs_hera_025.yaml +++ b/ush/soca/examples/run_jjobs_hera_025.yaml @@ -1,5 +1,5 @@ machine: hera -resdet: 48 +resdetatmos: 48 jjobs: - JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP diff --git a/ush/soca/run_jjobs.py b/ush/soca/run_jjobs.py index 4e0212da2..2008c3042 100755 --- a/ush/soca/run_jjobs.py +++ b/ush/soca/run_jjobs.py @@ -6,7 +6,7 @@ import argparse from datetime import datetime, timedelta -machines = {"container", "hera", "orion"} +machines = {"container", "hera", "orion", "hercules"} # Assume the default conda environement is gdassapp ENVS = {'JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY': 'eva'} @@ -125,12 +125,12 @@ def setupexpt(self): "app": "ATM", "start": "warm", "gfs_cyc": "0", - "resdet": self.config['resdet'], - "resens": self.config['resens'], + "resdetatmos": self.config['resdetatmos'], + "resensatmos": self.config['resensatmos'], "nens": "0", "pslot": "${PSLOT}", "configdir": "${PWD}/config/gfs", - "comrot": self.rotdir, + "comroot": self.rotdir, "expdir": "${EXPDIRS}", "yaml": "overwrite_defaults.yaml"} @@ -164,7 +164,7 @@ def precom(self, com, tmpl): def copy_bkgs(self): """ Fill the ROTDIR with backgrounds - TODO: replace 
by fill comrot? + TODO: replace by fill comroot? """ print(f"gPDY: {self.gPDY}") print(f"gcyc: {self.gcyc}") @@ -213,7 +213,8 @@ def fixconfigs(self): print(f"Probably does not work for {machine} yet") # swap a few variables in config.base - var2replace = {'HOMEgfs': self.homegfs, + self.homegfs_real = os.path.realpath(self.homegfs) + var2replace = {'HOMEgfs': self.homegfs_real, 'STMP': self.stmp, 'ROTDIR': self.rotdir, 'EXPDIRS': self.expdirs} @@ -254,7 +255,7 @@ def execute(self, submit=False): def main(): - epilog = ["Make sure the comrot, experiment and config directories are removed before running this script", + epilog = ["Make sure the comroot, experiment and config directories are removed before running this script", "Examples:", " ./run_jjobs.py -y run_jjobs_orion.yaml", " ./run_jjobs.py -h"] @@ -272,7 +273,7 @@ def main(): exp_config = yaml.safe_load(file) if not args.skip: - # Write a setup card (prepare COMROT, configs, ...) + # Write a setup card (prepare COMROOT, configs, ...) 
setup_card = JobCard("setup_expt.sh", exp_config) setup_card.export_env_vars_script() setup_card.setupexpt() diff --git a/ush/submodules/add_submodules.sh b/ush/submodules/add_submodules.sh new file mode 100755 index 000000000..16d239afe --- /dev/null +++ b/ush/submodules/add_submodules.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# add_submodules.sh +# add submodules to the git commit + +my_dir="$( cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )" + +gdasdir=${1:-${my_dir}/../../} + +repos=" +oops +vader +saber +ioda +ufo +fv3-jedi +soca +iodaconv +" + +for r in $repos; do + echo "Adding ${gdasdir}/sorc/${r}" + cd ${gdasdir}/sorc + git add ${r} +done diff --git a/ush/submodules/update_develop.sh b/ush/submodules/update_develop.sh new file mode 100755 index 000000000..a6e9210bb --- /dev/null +++ b/ush/submodules/update_develop.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# update_develop.sh +# update specified repositories to most recent develop hash + +repos=" +oops +vader +saber +ioda +ufo +fv3-jedi +soca +iodaconv +" + +my_dir="$( cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )" + +gdasdir=${1:-${my_dir}/../../} + +for r in $repos; do + echo "Updating ${gdasdir}/sorc/${r}" + cd ${gdasdir}/sorc + git submodule update --remote --merge ${r} +done diff --git a/utils/test/CMakeLists.txt b/utils/test/CMakeLists.txt index a39bad869..fcbb26e37 100644 --- a/utils/test/CMakeLists.txt +++ b/utils/test/CMakeLists.txt @@ -1,4 +1,4 @@ -# Create Data directory for test input config and symlink all files +# Create Data directory for test input config and symlink all files list( APPEND utils_test_input testinput/gdas_meanioda.yaml testinput/gdas_rads2ioda.yaml @@ -45,8 +45,8 @@ ecbuild_add_test( TARGET test_gdasapp_util_ioda_example # Prepare data for the IODA converters file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/obsproc) ecbuild_add_test( TARGET test_gdasapp_util_prepdata - COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/test/prepdata.sh - ARGS 
${CMAKE_CURRENT_SOURCE_DIR}/../../utils/test + COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/prepdata.sh + ARGS ${CMAKE_CURRENT_SOURCE_DIR} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/obsproc) # Test the RADS to IODA converter diff --git a/utils/test/testinput/gdas_meanioda.yaml b/utils/test/testinput/gdas_meanioda.yaml index 555ca25e7..91cdbf5b8 100644 --- a/utils/test/testinput/gdas_meanioda.yaml +++ b/utils/test/testinput/gdas_meanioda.yaml @@ -8,7 +8,7 @@ obs space: obsdatain: engine: type: H5File - obsfile: ../../../soca/test/Data/obs/gmi_gpm_obs.nc + obsfile: ../../../sorc/soca/test/Data/obs/gmi_gpm_obs.nc # the below 2 lines are not used but needed by the IODA obsspace it seems... simulated variables: [brightnessTemperature] observed variables: [brightnessTemperature]