diff --git a/CMakeLists.txt b/CMakeLists.txt index d8545e49b..5bbe8e218 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -96,6 +96,9 @@ if(BUILD_GDASBUNDLE) ecbuild_bundle( PROJECT mom6 GIT "https://github.com/jcsda-internal/MOM6.git" TAG 51ec489 RECURSIVE ) ecbuild_bundle( PROJECT soca GIT "https://github.com/jcsda-internal/soca.git" TAG 7efe282 ) + # Build JEDI/DA or other peripherals + ecbuild_bundle( PROJECT gdas-utils SOURCE "./utils" ) + # Build IODA converters option(BUILD_IODA_CONVERTERS "Build IODA Converters" ON) if(BUILD_IODA_CONVERTERS) diff --git a/build.sh b/build.sh index 0ca550994..a6040b0f0 100755 --- a/build.sh +++ b/build.sh @@ -114,7 +114,7 @@ set -x if [[ $BUILD_JCSDA == 'YES' ]]; then make -j ${BUILD_JOBS:-6} VERBOSE=$BUILD_VERBOSE else - builddirs="fv3-jedi soca iodaconv land-imsproc land-jediincr" + builddirs="fv3-jedi soca iodaconv land-imsproc land-jediincr gdas-utils" for b in $builddirs; do cd $b make -j ${BUILD_JOBS:-6} VERBOSE=$BUILD_VERBOSE diff --git a/parm/soca/variational/socaincr2mom6.yaml b/parm/soca/variational/socaincr2mom6.yaml new file mode 100644 index 000000000..c6c2a50c3 --- /dev/null +++ b/parm/soca/variational/socaincr2mom6.yaml @@ -0,0 +1,27 @@ +geometry: + mom6_input_nml: mom_input.nml + fields metadata: ./fields_metadata.yaml + +date: '{{ATM_WINDOW_BEGIN}}' + +layers variable: [hocn] + +increment variables: [tocn, socn, uocn, vocn, ssh] + +vertical geometry: + date: '{{ATM_WINDOW_BEGIN}}' + basename: ./INPUT/ + ocn_filename: MOM.res.nc + read_from_file: 1 + +soca increment: + date: '{{ATM_WINDOW_BEGIN}}' + basename: ./Data/ + ocn_filename: 'ocn.3dvarfgat_pseudo.incr.{{ATM_WINDOW_BEGIN}}.nc' + read_from_file: 1 + +mom6 iau increment: + datadir: ./ + date: '{{ATM_WINDOW_BEGIN}}' + exp: mom6_iau + type: incr diff --git a/scripts/exgdas_global_marine_analysis_chkpt.sh b/scripts/exgdas_global_marine_analysis_chkpt.sh index 6863a29a6..be506379d 100755 --- a/scripts/exgdas_global_marine_analysis_chkpt.sh +++ 
b/scripts/exgdas_global_marine_analysis_chkpt.sh @@ -55,10 +55,7 @@ EOF --out "${mom6_iau_incr}" \ --nsst_yaml "nsst.yaml" else - ${HOMEgfs}/sorc/gdas.cd/ush/socaincr2mom6.py --incr "${soca_incr}" \ - --bkg "${DATA}/INPUT/MOM.res.nc" \ - --grid "${DATA}/soca_gridspec.nc" \ - --out "${mom6_iau_incr}" + $APRUN_OCNANAL ${JEDI_BIN}/gdas_socaincr2mom6.x socaincr2mom6.yaml fi export err=$? if [ $err -gt 0 ]; then diff --git a/scripts/exgdas_global_marine_analysis_prep.py b/scripts/exgdas_global_marine_analysis_prep.py index 3147facc6..21af4bcc7 100755 --- a/scripts/exgdas_global_marine_analysis_prep.py +++ b/scripts/exgdas_global_marine_analysis_prep.py @@ -472,6 +472,14 @@ def find_clim_ens(input_date): yaml.dump(soca2cice_cfg, f, sort_keys=False, default_flow_style=False) ufsda.genYAML.genYAML('tmp.yaml', output=varchgyaml) +# prepare yaml for soca to MOM6 IAU increment +logging.info(f"---------------- generate soca to MOM6 IAU yaml") +socaincr2mom6_yaml = os.path.join(anl_dir, 'socaincr2mom6.yaml') +socaincr2mom6_yaml_template = os.path.join(gdas_home, 'parm', 'soca', 'variational', 'socaincr2mom6.yaml') +s2mconfig = YAMLFile(path=socaincr2mom6_yaml_template) +s2mconfig = Template.substitute_structure(s2mconfig, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig.get) +s2mconfig.save(socaincr2mom6_yaml) + ################################################################################ # Copy initial condition ics_list = [] diff --git a/scripts/exgdas_global_marine_analysis_vrfy.py b/scripts/exgdas_global_marine_analysis_vrfy.py index 04f56afd5..ed68fb100 100755 --- a/scripts/exgdas_global_marine_analysis_vrfy.py +++ b/scripts/exgdas_global_marine_analysis_vrfy.py @@ -19,108 +19,13 @@ import os import numpy as np -import matplotlib.pyplot as plt -import xarray as xr -import cartopy -import cartopy.crs as ccrs import gen_eva_obs_yaml import marine_eva_post import diag_statistics +from soca_vrfy import plot_config, plot_horizontal_slice, plot_zonal_slice import subprocess 
from datetime import datetime, timedelta -projs = {'North': ccrs.NorthPolarStereo(), - 'South': ccrs.SouthPolarStereo(), - 'Global': ccrs.Mollweide(central_longitude=-150)} - - -def plot_config(grid_file=[], data_file=[], - variable=[], levels=[], bounds=[], colormap=[], comout=[], lats=[]): - """ - Prepares the configuration for the plotting functions below - """ - config = {} - config['grid file'] = grid_file - config['fields file'] = data_file - config['variable'] = variable - config['levels'] = levels - config['bounds'] = bounds - config['colormap'] = colormap - config['lats'] = lats - config['comout'] = comout - config['max depth'] = 5000.0 - config['proj'] = 'Global' - return config - - -def plot_horizontal_slice(config): - """ - pcolormesh of a horizontal slice of an ocean field - """ - grid = xr.open_dataset(config['grid file']) - data = xr.open_dataset(config['fields file']) - - dirname = os.path.join(config['comout'], config['variable']) - os.makedirs(dirname, exist_ok=True) - - if config['variable'] in ['Temp', 'Salt', 'u', 'v']: - level = config['levels'][0] - slice_data = np.squeeze(data[config['variable']])[level, :, :] - label_colorbar = config['variable']+' Level '+str(level) - figname = os.path.join(dirname, config['variable']+'_Level_'+str(level)) - else: - slice_data = np.squeeze(data[config['variable']]) - label_colorbar = config['variable'] - figname = os.path.join(dirname, config['variable']+'_'+config['proj']) - - bounds = config['bounds'] - - fig, ax = plt.subplots(figsize=(8, 5), subplot_kw={'projection': projs[config['proj']]}) - plt.pcolormesh(np.squeeze(grid.lon), - np.squeeze(grid.lat), - slice_data, - vmin=bounds[0], vmax=bounds[1], - transform=ccrs.PlateCarree(), - cmap=config['colormap']) - - plt.colorbar(label=label_colorbar, shrink=0.5, orientation='horizontal') - ax.coastlines() # TODO: make this work on hpc - ax.gridlines(draw_labels=True) - if config['proj'] == 'South': - ax.set_extent([-180, 180, -90, -50], ccrs.PlateCarree()) 
- if config['proj'] == 'North': - ax.set_extent([-180, 180, 50, 90], ccrs.PlateCarree()) - # ax.add_feature(cartopy.feature.LAND) # TODO: make this work on hpc - plt.savefig(figname, bbox_inches='tight', dpi=600) - - -def plot_zonal_slice(config): - """ - pcolormesh of a zonal slice of an ocean field - """ - lat = float(config['lats'][0]) - grid = xr.open_dataset(config['grid file']) - data = xr.open_dataset(config['fields file']) - lat_index = np.argmin(np.array(np.abs(np.squeeze(grid.lat)[:, 0]-lat))) - slice_data = np.squeeze(np.array(data[config['variable']]))[:, lat_index, :] - depth = np.squeeze(np.array(grid['h']))[:, lat_index, :] - depth[np.where(np.abs(depth) > 10000.0)] = 0.0 - depth = np.cumsum(depth, axis=0) - bounds = config['bounds'] - x = np.tile(np.squeeze(grid.lon[:, lat_index]), (np.shape(depth)[0], 1)) - fig, ax = plt.subplots(figsize=(8, 5)) - plt.pcolormesh(x, -depth, slice_data, - vmin=bounds[0], vmax=bounds[1], - cmap=config['colormap']) - plt.colorbar(label=config['variable']+' Lat '+str(lat), shrink=0.5, orientation='horizontal') - ax.set_ylim(-config['max depth'], 0) - dirname = os.path.join(config['comout'], config['variable']) - os.makedirs(dirname, exist_ok=True) - figname = os.path.join(dirname, config['variable'] + - 'zonal_lat_'+str(int(lat)) + '_' + str(int(config['max depth'])) + 'm') - plt.savefig(figname, bbox_inches='tight', dpi=600) - - comout = os.getenv('COM_OCEAN_ANALYSIS') com_ice_history = os.getenv('COM_ICE_HISTORY_PREV') com_ocean_history = os.getenv('COM_OCEAN_HISTORY_PREV') diff --git a/test/atm/global-workflow/jjob_ens_final.sh b/test/atm/global-workflow/jjob_ens_final.sh index 8296fb29a..ae1928a18 100755 --- a/test/atm/global-workflow/jjob_ens_final.sh +++ b/test/atm/global-workflow/jjob_ens_final.sh @@ -45,8 +45,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' -o $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=debug --time=00:10:00 --export=ALL --wait 
${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE +if [ $machine = 'HERA' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE +elif [ $machine = 'ORION' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE fi diff --git a/test/atm/global-workflow/jjob_ens_init.sh b/test/atm/global-workflow/jjob_ens_init.sh index fbeabd5f7..8966f2b69 100755 --- a/test/atm/global-workflow/jjob_ens_init.sh +++ b/test/atm/global-workflow/jjob_ens_init.sh @@ -104,8 +104,10 @@ for imem in $(seq 1 $NMEM_ENS); do done # Execute j-job -if [ $machine = 'HERA' -o $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=debug --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE +if [ $machine = 'HERA' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE +elif [ $machine = 'ORION' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE fi diff --git a/test/atm/global-workflow/jjob_ens_run.sh b/test/atm/global-workflow/jjob_ens_run.sh index 56e384f79..597afcd43 100755 --- a/test/atm/global-workflow/jjob_ens_run.sh +++ b/test/atm/global-workflow/jjob_ens_run.sh @@ -48,8 +48,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' -o $machine = 'ORION' ]; then - sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=debug --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN +if [ $machine = 'HERA' ]; then + sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --time=00:30:00 --export=ALL 
--wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN +elif [ $machine = 'ORION' ]; then + sbatch --nodes=1 --ntasks=36 --account=$ACCOUNT --qos=batch --partition=orion --time=00:30:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN else ${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN fi diff --git a/test/atm/global-workflow/jjob_var_final.sh b/test/atm/global-workflow/jjob_var_final.sh index fa03cc927..f9b64afbe 100755 --- a/test/atm/global-workflow/jjob_var_final.sh +++ b/test/atm/global-workflow/jjob_var_final.sh @@ -45,8 +45,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' -o $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=debug --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE +if [ $machine = 'HERA' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE +elif [ $machine = 'ORION' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE fi diff --git a/test/atm/global-workflow/jjob_var_init.sh b/test/atm/global-workflow/jjob_var_init.sh index 2bd4ce637..8de5cb329 100755 --- a/test/atm/global-workflow/jjob_var_init.sh +++ b/test/atm/global-workflow/jjob_var_init.sh @@ -101,8 +101,10 @@ done # Execute j-job -if [ $machine = 'HERA' -o $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=debug --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE +if [ $machine = 'HERA' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE +elif [ $machine = 'ORION' ]; then + sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait 
${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE else ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE fi diff --git a/test/atm/global-workflow/jjob_var_run.sh b/test/atm/global-workflow/jjob_var_run.sh index 200fdd1b2..a78b1e10e 100755 --- a/test/atm/global-workflow/jjob_var_run.sh +++ b/test/atm/global-workflow/jjob_var_run.sh @@ -48,8 +48,10 @@ elif [ $machine = 'ORION' ]; then fi # Execute j-job -if [ $machine = 'HERA' -o $machine = 'ORION' ]; then - sbatch --ntasks=6 --account=$ACCOUNT --qos=debug --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN +if [ $machine = 'HERA' ]; then + sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN +elif [ $machine = 'ORION' ]; then + sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN else ${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN fi diff --git a/test/atm/global-workflow/run_jedi_exe_3denvar.sh b/test/atm/global-workflow/run_jedi_exe_3denvar.sh index 3e94394de..25824dcb4 100755 --- a/test/atm/global-workflow/run_jedi_exe_3denvar.sh +++ b/test/atm/global-workflow/run_jedi_exe_3denvar.sh @@ -26,7 +26,7 @@ if [ "$machine" = "hera" ] ; then partition="hera" gdasfix="/scratch1/NCEPDEV/da/Cory.R.Martin/GDASApp/fix" elif [ "$machine" = "orion" ]; then - partition="debug" + partition="orion" gdasfix="/work2/noaa/da/cmartin/GDASApp/fix" fi @@ -75,7 +75,7 @@ config: job options: machine: ${machine} account: da-cpu - queue: debug + queue: batch partition: ${partition} walltime: '30:00' ntasks: 6 diff --git a/test/atm/global-workflow/run_jedi_exe_3dhofx.sh b/test/atm/global-workflow/run_jedi_exe_3dhofx.sh index fb1a6a99e..0cb06d109 100755 --- a/test/atm/global-workflow/run_jedi_exe_3dhofx.sh +++ b/test/atm/global-workflow/run_jedi_exe_3dhofx.sh @@ -26,7 +26,7 @@ if [ "$machine" = "hera" ] ; then partition="hera" 
gdasfix="/scratch1/NCEPDEV/da/Cory.R.Martin/GDASApp/fix" elif [ "$machine" = "orion" ]; then - partition="debug" + partition="orion" gdasfix="/work2/noaa/da/cmartin/GDASApp/fix" fi @@ -64,7 +64,7 @@ config: job options: machine: ${machine} account: da-cpu - queue: debug + queue: batch partition: ${partition} walltime: '30:00' ntasks: 36 diff --git a/test/atm/global-workflow/run_jedi_exe_3dvar.sh b/test/atm/global-workflow/run_jedi_exe_3dvar.sh index 459f52121..65b201504 100755 --- a/test/atm/global-workflow/run_jedi_exe_3dvar.sh +++ b/test/atm/global-workflow/run_jedi_exe_3dvar.sh @@ -26,7 +26,7 @@ if [ "$machine" = "hera" ] ; then partition="hera" gdasfix="/scratch1/NCEPDEV/da/Cory.R.Martin/GDASApp/fix" elif [ "$machine" = "orion" ]; then - partition="debug" + partition="orion" gdasfix="/work2/noaa/da/cmartin/GDASApp/fix" fi @@ -74,7 +74,7 @@ config: job options: machine: ${machine} account: da-cpu - queue: debug + queue: batch partition: ${partition} walltime: '30:00' ntasks: 6 diff --git a/test/atm/global-workflow/run_jedi_exe_letkf.sh b/test/atm/global-workflow/run_jedi_exe_letkf.sh index d507af5b6..d3db2f8c4 100755 --- a/test/atm/global-workflow/run_jedi_exe_letkf.sh +++ b/test/atm/global-workflow/run_jedi_exe_letkf.sh @@ -26,7 +26,7 @@ if [ "$machine" = "hera" ] ; then partition="hera" gdasfix="/scratch1/NCEPDEV/da/Cory.R.Martin/GDASApp/fix" elif [ "$machine" = "orion" ]; then - partition="debug" + partition="orion" gdasfix="/work2/noaa/da/cmartin/GDASApp/fix" fi @@ -73,7 +73,7 @@ config: job options: machine: ${machine} account: da-cpu - queue: debug + queue: batch partition: ${partition} walltime: '30:00' ntasks: 36 diff --git a/test/soca/gw/CMakeLists.txt b/test/soca/gw/CMakeLists.txt index 2e1117909..e1b20f602 100644 --- a/test/soca/gw/CMakeLists.txt +++ b/test/soca/gw/CMakeLists.txt @@ -14,7 +14,7 @@ add_test(NAME test_gdasapp_soca_prep set(MACHINE "container") IF (IS_DIRECTORY /work2/noaa/da) set(MACHINE "orion") - set(PARTITION "debug") + set(PARTITION 
"orion") ENDIF() IF (IS_DIRECTORY /scratch2/NCEPDEV/) set(MACHINE "hera") @@ -33,6 +33,8 @@ add_test(NAME test_gdasapp_soca_concatioda add_test(NAME test_gdasapp_soca_run_clean COMMAND ${CMAKE_COMMAND} -E remove_directory ${PROJECT_BINARY_DIR}/test/soca/gw/testrun/testjjobs) +# Create scratch for testing apps +file(MAKE_DIRECTORY ${PROJECT_BINARY_DIR}/test/soca/gw/apps_scratch) # Test JGDAS_GLOBAL_OCEAN_ANALYSIS_* set(jjob_list "JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP" @@ -46,6 +48,14 @@ set(jjob_list "JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP" set(setup "") foreach(jjob ${jjob_list}) + # Make a copy of the scratch before it is removed by post + IF (jjob STREQUAL "JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT") + add_test(NAME test_gdasapp_soca_copy_scratch + COMMAND ${PROJECT_SOURCE_DIR}/test/soca/gw/copy_scratch.sh ${PROJECT_BINARY_DIR} + WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/soca/gw) + ENDIF() + + # substitute a few variables in the test yaml message("ctest for ${jjob}") set(HOMEgfs ${PROJECT_SOURCE_DIR}/../..) 
@@ -66,3 +76,23 @@ foreach(jjob ${jjob_list}) set(setup "--skip") # Only run the setup of the first test, if not, it will hang # waiting for standard input from setup_expt.py endforeach() + +# Test gdas/oops applications +set(ctest_list "socahybridweights" "socaincr2mom6") +foreach(ctest ${ctest_list}) + set(TEST ${ctest}) + set(EXEC ${PROJECT_BINARY_DIR}/bin/gdas_${ctest}.x) + set(YAML ${PROJECT_SOURCE_DIR}/test/soca/testinput/${ctest}.yaml) + configure_file(${PROJECT_SOURCE_DIR}/test/soca/gw/run_gdas_apps.yaml.test + ${PROJECT_BINARY_DIR}/test/soca/gw/testrun/run_gdas_apps_${ctest}.yaml) + set(test_name "test_gdasapp_soca_${ctest}") + add_test(NAME ${test_name} + COMMAND ${PROJECT_SOURCE_DIR}/ush/soca/run_jjobs.py + -y ${PROJECT_BINARY_DIR}/test/soca/gw/testrun/run_gdas_apps_${ctest}.yaml + --skip --ctest True + WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/soca/gw/apps_scratch) + + set_tests_properties(${test_name} + PROPERTIES + ENVIRONMENT "PYTHONPATH=${PROJECT_BINARY_DIR}/lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}:$ENV{PYTHONPATH}") +endforeach() diff --git a/test/soca/gw/copy_scratch.sh b/test/soca/gw/copy_scratch.sh new file mode 100755 index 000000000..9058d8eea --- /dev/null +++ b/test/soca/gw/copy_scratch.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -e + +project_binary_dir=$1 + +cp -r ${project_binary_dir}/test/soca/gw/testrun/testjjobs/RUNDIRS/gdas_test/gdasocnanal_12/* ${project_binary_dir}/test/soca/gw/apps_scratch diff --git a/test/soca/gw/run_gdas_apps.yaml.test b/test/soca/gw/run_gdas_apps.yaml.test new file mode 100644 index 000000000..8b4cca66e --- /dev/null +++ b/test/soca/gw/run_gdas_apps.yaml.test @@ -0,0 +1,14 @@ +machine: @MACHINE@ + +ctest command: + executable: @EXEC@ + yaml input: @YAML@ + +job options: + account: da-cpu + qos: batch + output: @TEST@.out + nodes: 1 + ntasks: 1 + partition: @PARTITION@ + time: 00:05:00 diff --git a/test/soca/gw/run_jjobs.yaml.test b/test/soca/gw/run_jjobs.yaml.test index b619e8ccc..eaa6575ea 
100644 --- a/test/soca/gw/run_jjobs.yaml.test +++ b/test/soca/gw/run_jjobs.yaml.test @@ -51,7 +51,7 @@ setup_expt config: job options: account: da-cpu - qos: debug + qos: batch output: @JJOB@.out nodes: 1 ntasks: 16 diff --git a/test/soca/testinput/socahybridweights.yaml b/test/soca/testinput/socahybridweights.yaml new file mode 100644 index 000000000..cdad1ab27 --- /dev/null +++ b/test/soca/testinput/socahybridweights.yaml @@ -0,0 +1,26 @@ +geometry: + mom6_input_nml: mom_input.nml + fields metadata: ./fields_metadata.yaml + +date: 2018-04-15T09:00:00Z + +variables: + ice: [cicen, hicen, hsnon] + ocean: [tocn, socn, uocn, vocn, ssh] + +background: + date: '2018-04-15T09:00:00Z' + basename: ./INPUT/ + ocn_filename: MOM.res.nc + ice_filename: cice.res.nc + read_from_file: 1 + +weights: + ice: 0.1 + ocean: 0.5 + +output: + datadir: ./ + date: '2018-04-15T12:00:00Z' + exp: hybrid_weights + type: incr diff --git a/test/soca/testinput/socaincr2mom6.yaml b/test/soca/testinput/socaincr2mom6.yaml new file mode 100644 index 000000000..0b11b893d --- /dev/null +++ b/test/soca/testinput/socaincr2mom6.yaml @@ -0,0 +1,27 @@ +geometry: + mom6_input_nml: mom_input.nml + fields metadata: ./fields_metadata.yaml + +date: 2018-04-15T09:00:00Z + +layers variable: [hocn] + +increment variables: [tocn, socn, uocn, vocn, ssh] + +vertical geometry: + date: 2018-04-15T09:00:00Z + basename: ./INPUT/ + ocn_filename: MOM.res.nc + read_from_file: 1 + +soca increment: + date: 2018-04-15T09:00:00Z + basename: ./Data/ + ocn_filename: 'ocn.3dvarfgat_pseudo.incr.2018-04-15T09:00:00Z.nc' + read_from_file: 1 + +mom6 iau increment: + datadir: ./ + date: 2018-04-15T09:00:00Z + exp: mom6_iau + type: incr diff --git a/ush/soca/run_jjobs.py b/ush/soca/run_jjobs.py index 1cbb5bd5c..d4149633c 100755 --- a/ush/soca/run_jjobs.py +++ b/ush/soca/run_jjobs.py @@ -15,23 +15,37 @@ class JobCard: - def __init__(self, scriptname, config): + def __init__(self, scriptname, config, ctest=False): """ Constructor for the 
JobCard class. :param scriptname: name of the script file :param config: dictionary containing configuration information """ self.name = scriptname + self.config = config + self.machine = config['machine'] self.f = open(self.name, "w") self.f.write("#!/usr/bin/env bash\n") + self.f.write(f"# Running on {self.machine} \n") + + # Exit early if not testing a gw jjob + print(config) + if ctest: + self.header() + # Hard coded to one task for now + mpiexec = "mpirun -np 1" + if self.machine != "container": + mpiexec = "srun -n 1" + command = f"{mpiexec} {config['ctest command']['executable']} {config['ctest command']['yaml input']}" + self.f.write(f"{command} \n") + return + self.pslot = config['gw environement']['experiment identifier']['PSLOT'] self.homegfs = config['gw environement']['experiment identifier']['HOMEgfs'] self.stmp = config['gw environement']['working directories']['STMP'] self.rotdirs = config['gw environement']['working directories']['ROTDIRS'] self.rotdir = os.path.join(self.rotdirs, self.pslot) self.expdirs = config['gw environement']['working directories']['EXPDIRS'] - self.config = config - self.machine = config['machine'] # get cycle info self.PDY = config['gw environement']['cycle info']['PDY'] @@ -235,6 +249,7 @@ def main(): epilog=os.linesep.join(epilog)) parser.add_argument("-y", "--yaml", required=True, help="The YAML file") parser.add_argument("-s", "--skip", action='store_true', default=False, help="Skip setup_expt.sh if present") + parser.add_argument("-c", "--ctest", default=False, help="If true, the test is not a jjob (default)") args = parser.parse_args() # Get the experiment configuration @@ -250,13 +265,19 @@ def main(): setup_card.close() # flush to file, close and make it executable setup_card.execute() # run the setup card - # Write the j-jobs card - run_card = JobCard("run_jjobs.sh", exp_config) - run_card.fixconfigs() # over-write some of the config variables - run_card.header() # prepare a machine dependent header (SLURM or 
nothing) - run_card.export_env_vars_script() - run_card.copy_bkgs() - run_card.jjobs() + if args.ctest: + # Write the ctest card + run_card = JobCard("run_jjobs.sh", exp_config, ctest=True) + else: + # Write the j-jobs card + run_card = JobCard("run_jjobs.sh", exp_config) + run_card.fixconfigs() # over-write some of the config variables + run_card.header() # prepare a machine dependent header (SLURM or nothing) + run_card.export_env_vars_script() + run_card.copy_bkgs() + run_card.jjobs() + run_card.close() + run_card.close() # Submit/Run the j-jobs card diff --git a/ush/soca_vrfy.py b/ush/soca_vrfy.py new file mode 100755 index 000000000..f06e4cd4f --- /dev/null +++ b/ush/soca_vrfy.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 + +# make plots for marine analysis + +import matplotlib.pyplot as plt +import xarray as xr +import cartopy +import cartopy.crs as ccrs +import numpy as np +import os + + +projs = {'North': ccrs.NorthPolarStereo(), + 'South': ccrs.SouthPolarStereo(), + 'Global': ccrs.Mollweide(central_longitude=-150)} + + +def plot_config(grid_file=[], data_file=[], + variable=[], levels=[], bounds=[], colormap=[], comout=[], lats=[]): + """ + Prepares the configuration for the plotting functions below + """ + config = {} + config['grid file'] = grid_file + config['fields file'] = data_file + config['variable'] = variable + config['levels'] = levels + config['bounds'] = bounds + config['colormap'] = colormap + config['lats'] = lats + config['comout'] = comout + config['max depth'] = 5000.0 + config['proj'] = 'Global' + return config + + +def plot_horizontal_slice(config): + """ + pcolormesh of a horizontal slice of an ocean field + """ + grid = xr.open_dataset(config['grid file']) + data = xr.open_dataset(config['fields file']) + + dirname = os.path.join(config['comout'], config['variable']) + os.makedirs(dirname, exist_ok=True) + + if config['variable'] in ['Temp', 'Salt', 'u', 'v']: + level = config['levels'][0] + slice_data = 
np.squeeze(data[config['variable']])[level, :, :] + label_colorbar = config['variable']+' Level '+str(level) + figname = os.path.join(dirname, config['variable']+'_Level_'+str(level)) + else: + slice_data = np.squeeze(data[config['variable']]) + label_colorbar = config['variable'] + figname = os.path.join(dirname, config['variable']+'_'+config['proj']) + + bounds = config['bounds'] + + fig, ax = plt.subplots(figsize=(8, 5), subplot_kw={'projection': projs[config['proj']]}) + plt.pcolormesh(np.squeeze(grid.lon), + np.squeeze(grid.lat), + slice_data, + vmin=bounds[0], vmax=bounds[1], + transform=ccrs.PlateCarree(), + cmap=config['colormap']) + + plt.colorbar(label=label_colorbar, shrink=0.5, orientation='horizontal') + ax.coastlines() # TODO: make this work on hpc + ax.gridlines(draw_labels=True) + if config['proj'] == 'South': + ax.set_extent([-180, 180, -90, -50], ccrs.PlateCarree()) + if config['proj'] == 'North': + ax.set_extent([-180, 180, 50, 90], ccrs.PlateCarree()) + # ax.add_feature(cartopy.feature.LAND) # TODO: make this work on hpc + plt.savefig(figname, bbox_inches='tight', dpi=600) + + +def plot_zonal_slice(config): + """ + pcolormesh of a zonal slice of an ocean field + """ + lat = float(config['lats'][0]) + grid = xr.open_dataset(config['grid file']) + data = xr.open_dataset(config['fields file']) + lat_index = np.argmin(np.array(np.abs(np.squeeze(grid.lat)[:, 0]-lat))) + slice_data = np.squeeze(np.array(data[config['variable']]))[:, lat_index, :] + depth = np.squeeze(np.array(grid['h']))[:, lat_index, :] + depth[np.where(np.abs(depth) > 10000.0)] = 0.0 + depth = np.cumsum(depth, axis=0) + bounds = config['bounds'] + x = np.tile(np.squeeze(grid.lon[:, lat_index]), (np.shape(depth)[0], 1)) + fig, ax = plt.subplots(figsize=(8, 5)) + plt.pcolormesh(x, -depth, slice_data, + vmin=bounds[0], vmax=bounds[1], + cmap=config['colormap']) + plt.colorbar(label=config['variable']+' Lat '+str(lat), shrink=0.5, orientation='horizontal') + ax.set_ylim(-config['max 
depth'], 0) + dirname = os.path.join(config['comout'], config['variable']) + os.makedirs(dirname, exist_ok=True) + figname = os.path.join(dirname, config['variable'] + + 'zonal_lat_'+str(int(lat)) + '_' + str(int(config['max depth'])) + 'm') + plt.savefig(figname, bbox_inches='tight', dpi=600) diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt new file mode 100644 index 000000000..fa229981b --- /dev/null +++ b/utils/CMakeLists.txt @@ -0,0 +1,18 @@ +project(gdas-utils LANGUAGES C CXX ) + +find_package(NetCDF REQUIRED COMPONENTS CXX) +find_package(oops REQUIRED) +find_package(atlas REQUIRED) +find_package(soca REQUIRED) + +ecbuild_add_executable( TARGET gdas_socaincr2mom6.x + SOURCES gdas_socaincr2mom6.cc ) + +target_compile_features( gdas_socaincr2mom6.x PUBLIC cxx_std_17) +target_link_libraries( gdas_socaincr2mom6.x PUBLIC NetCDF::NetCDF_CXX oops atlas soca) + +ecbuild_add_executable( TARGET gdas_socahybridweights.x + SOURCES gdas_socahybridweights.cc ) + +target_compile_features( gdas_socahybridweights.x PUBLIC cxx_std_17) +target_link_libraries( gdas_socahybridweights.x PUBLIC NetCDF::NetCDF_CXX oops atlas soca) diff --git a/utils/gdas_socahybridweights.cc b/utils/gdas_socahybridweights.cc new file mode 100644 index 000000000..1a6ae38e1 --- /dev/null +++ b/utils/gdas_socahybridweights.cc @@ -0,0 +1,8 @@ +#include "gdas_socahybridweights.h" +#include "oops/runs/Run.h" + +int main(int argc, char ** argv) { + oops::Run run(argc, argv); + gdasapp::SocaHybridWeights socahybridweights; + return run.execute(socahybridweights); +} diff --git a/utils/gdas_socahybridweights.h b/utils/gdas_socahybridweights.h new file mode 100644 index 000000000..d6a52c357 --- /dev/null +++ b/utils/gdas_socahybridweights.h @@ -0,0 +1,86 @@ +#include <iostream> +#include <string> +#include <vector> + +#include "eckit/config/LocalConfiguration.h" + +#include "atlas/field.h" + +#include "oops/base/PostProcessor.h" +#include "oops/mpi/mpi.h" +#include "oops/runs/Application.h" +#include "oops/util/DateTime.h" 
+#include "oops/util/Duration.h" +#include "oops/util/Logger.h" + +#include "soca/State/State.h" +#include "soca/Geometry/Geometry.h" +#include "soca/Increment/Increment.h" + +namespace gdasapp { + + class SocaHybridWeights : public oops::Application { + public: + explicit SocaHybridWeights(const eckit::mpi::Comm & comm = oops::mpi::world()) + : Application(comm) {} + static const std::string classname() {return "gdasapp::SocaHybridWeights";} + + int execute(const eckit::Configuration & fullConfig, bool /*validate*/) const { + + /// Setup the soca geometry + const eckit::LocalConfiguration geomConfig(fullConfig, "geometry"); + oops::Log::info() << "geometry: " << std::endl << geomConfig << std::endl; + const soca::Geometry geom(geomConfig, this->getComm()); + + /// Get the date + std::string strdt; + fullConfig.get("date", strdt); + util::DateTime dt = util::DateTime(strdt); + + /// Get the list of variables + oops::Variables socaOcnVars(fullConfig, "variables.ocean"); + oops::Variables socaIceVars(fullConfig, "variables.ice"); + oops::Variables socaVars(socaIceVars); + socaVars += socaOcnVars; + + /// Read the background + // TODO: Use the ice extent to set the weights ... 
no clue if this is + // possible at this level + soca::State socaBkg(geom, socaVars, dt); + const eckit::LocalConfiguration socaBkgConfig(fullConfig, "background"); + socaBkg.read(socaBkgConfig); + oops::Log::info() << "socaBkg: " << std::endl << socaBkg << std::endl; + + /// Read weights + const eckit::LocalConfiguration socaHWConfig(fullConfig, "weights"); + double wIce = socaHWConfig.getDouble("ice"); + double wOcean = socaHWConfig.getDouble("ocean"); + oops::Log::info() << "wIce: " << wIce << std::endl; + oops::Log::info() << "wOcean: " << wOcean << std::endl; + + /// Create fields of weights for seaice + soca::Increment socaIceHW(geom, socaVars, dt); // ocean field is mandatory for writting + socaIceHW.ones(); + socaIceHW *= wIce; + oops::Log::info() << "socaIceHW: " << std::endl << socaIceHW << std::endl; + const eckit::LocalConfiguration socaHWOutConfig(fullConfig, "output"); + socaIceHW.write(socaHWOutConfig); + + /// Create fields of weights for the ocean + soca::Increment socaOcnHW(geom, socaOcnVars, dt); + socaOcnHW.ones(); + socaOcnHW *= wOcean; + oops::Log::info() << "socaOcnHW: " << std::endl << socaOcnHW << std::endl; + socaOcnHW.write(socaHWOutConfig); + + return 0; + } + // ----------------------------------------------------------------------------- + private: + std::string appname() const { + return "gdasapp::SocaHybridWeights"; + } + // ----------------------------------------------------------------------------- + }; + +} // namespace gdasapp diff --git a/utils/gdas_socaincr2mom6.cc b/utils/gdas_socaincr2mom6.cc new file mode 100644 index 000000000..0b1230f56 --- /dev/null +++ b/utils/gdas_socaincr2mom6.cc @@ -0,0 +1,8 @@ +#include "gdas_socaincr2mom6.h" +#include "oops/runs/Run.h" + +int main(int argc, char ** argv) { + oops::Run run(argc, argv); + gdasapp::SocaIncr2Mom6 socaincr2mom6; + return run.execute(socaincr2mom6); +} diff --git a/utils/gdas_socaincr2mom6.h b/utils/gdas_socaincr2mom6.h new file mode 100644 index 000000000..c644b4c01 --- 
/dev/null +++ b/utils/gdas_socaincr2mom6.h @@ -0,0 +1,112 @@ +#include +#include +#include + +#include "eckit/config/LocalConfiguration.h" + +#include "atlas/field.h" + +#include "oops/base/PostProcessor.h" +#include "oops/mpi/mpi.h" +#include "oops/runs/Application.h" +#include "oops/util/DateTime.h" +#include "oops/util/Duration.h" +#include "oops/util/Logger.h" + +#include "soca/Geometry/Geometry.h" +#include "soca/Increment/Increment.h" + +namespace gdasapp { + + class SocaIncr2Mom6 : public oops::Application { + public: + explicit SocaIncr2Mom6(const eckit::mpi::Comm & comm = oops::mpi::world()) + : Application(comm) {} + static const std::string classname() {return "gdasapp::SocaIncr2Mom6";} + + int execute(const eckit::Configuration & fullConfig, bool /*validate*/) const { + + /// Setup the soca geometry + const eckit::LocalConfiguration geomConfig(fullConfig, "geometry"); + oops::Log::info() << "geometry: " << std::endl << geomConfig << std::endl; + const soca::Geometry geom(geomConfig, this->getComm()); + + /// Setup the vertical geometry from the background (layer thicknesses) + // get date + std::string strdt; + fullConfig.get("date", strdt); + util::DateTime dt = util::DateTime(strdt); + + // get layer thickness variable name + oops::Variables layerVar(fullConfig, "layers variable"); + ASSERT(layerVar.size() == 1); + + // read layer thicknesses from the relevant background + soca::Increment layerThick(geom, layerVar, dt); + const eckit::LocalConfiguration vertGeomConfig(fullConfig, "vertical geometry"); + layerThick.read(vertGeomConfig); + oops::Log::info() << "layerThick: " << std::endl << layerThick << std::endl; + + // Setup the soca increment + // get the increment variables + oops::Variables socaIncrVar(fullConfig, "increment variables"); + ASSERT(socaIncrVar.size() >= 1); + + // read the soca increment + soca::Increment socaIncr(geom, socaIncrVar, dt); + const eckit::LocalConfiguration socaIncrConfig(fullConfig, "soca increment"); + 
socaIncr.read(socaIncrConfig); + oops::Log::info() << "socaIncr: " << std::endl << socaIncr << std::endl; + + /// Create the MOM6 IAU increment + // concatenate variables + oops::Variables mom6IauVar(socaIncrVar); + mom6IauVar += layerVar; + oops::Log::info() << "mom6IauVar: " << std::endl << mom6IauVar << std::endl; + + // append layer variable to soca increment + atlas::FieldSet socaIncrFs; + socaIncr.toFieldSet(socaIncrFs); + socaIncr.updateFields(mom6IauVar); + oops::Log::info() << "MOM6 incr: " << std::endl << socaIncr << std::endl; + + // pad layer increment with zeros + atlas::FieldSet socaLayerThickFs; + layerThick.toFieldSet(socaLayerThickFs); + layerThick.updateFields(mom6IauVar); + oops::Log::info() << "h: " << std::endl << layerThick << std::endl; + + // append layer thinckness to increment + socaIncr += layerThick; + oops::Log::info() << "MOM6 IAU increment: " << std::endl << socaIncr << std::endl; + + // Save MOM6 IAU Increment + const eckit::LocalConfiguration mom6IauConfig(fullConfig, "mom6 iau increment"); + socaIncr.write(mom6IauConfig); + + // TODO: the "checkpoint" script expects the ocean increment output to + // be in "inc.nc". Remove what's below, eventually + std::string datadir; + mom6IauConfig.get("datadir", datadir); + std::filesystem::path pathToResolve = datadir; + std::string exp; + mom6IauConfig.get("exp", exp); + std::string outputType; + mom6IauConfig.get("type", outputType); + std::string incrFname = std::filesystem::canonical(pathToResolve); + incrFname += "/ocn." + exp + "." + outputType + "." 
+ dt.toString() + ".nc"; + const char* charPtr = incrFname.c_str(); + oops::Log::info() << "rename: " << incrFname << " to " << "inc.nc" << std::endl; + int result = std::rename(charPtr, "inc.nc"); + + return result; + } + // ----------------------------------------------------------------------------- + private: + std::string appname() const { + return "gdasapp::SocaIncr2Mom6"; + } + // ----------------------------------------------------------------------------- + }; + +} // namespace gdasapp