diff --git a/data_override/README.MD b/data_override/README.MD
index b35879edf..fd8396563 100644
--- a/data_override/README.MD
+++ b/data_override/README.MD
@@ -8,6 +8,7 @@
- [Converting legacy data_table to data_table.yaml](README.MD#3-converting-legacy-data_table-to-data_tableyaml)
- [Examples](README.MD#4-examples)
- [External Weight File Structure](README.MD#5-external-weight-file-structure)
+- [Ensemble and Nest Support](README.MD#6-ensemble-and-nest-support)
#### 1. YAML Data Table format:
Each entry in the data_table has the following key values:
@@ -200,3 +201,7 @@ variables:
- weight(:,:,2) -> (i,j+1)
- weight(:,:,3) -> (i+1,j)
- weight(:,:,4) -> (i+1,j+1)
+
+#### 6. Ensemble and Nest Support
+
+It may be desired to have each member of an ensemble use a different forcing file. In order to support this, FMS allows each ensemble member to have its own data_table.yaml. For example, for a run with 2 ensemble members, FMS will search for data_table.ens_01.yaml and data_table.ens_02.yaml. However, if both data_table.yaml and the data_table.ens_* files are present, the code will crash as only 1 option is allowed. Similarly, each nest can have its own data_table (data_table.nest_01.yaml), but in this case FMS will not crash if both data_table.nest_01.yaml and data_table.yaml are present. The main grid will use data_table.yaml and the first nest will use data_table.nest_01.yaml.
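+
+For example, for a run with 2 ensemble members (`ensemble_size = 2` in the `ensemble_nml` namelist), the run directory would contain one data_table per member. The listing below is only an illustrative sketch of the expected file names:
+
+```
+data_table.ens_01.yaml   # read by the first ensemble member
+data_table.ens_02.yaml   # read by the second ensemble member
+```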
\ No newline at end of file
diff --git a/data_override/include/data_override.inc b/data_override/include/data_override.inc
index d5cc93902..17360d0b8 100644
--- a/data_override/include/data_override.inc
+++ b/data_override/include/data_override.inc
@@ -21,7 +21,7 @@
! modules. These modules are not intended to be used directly - they should be
! used through the data_override_mod API. See data_override.F90 for details.
-use platform_mod, only: r4_kind, r8_kind, FMS_PATH_LEN
+use platform_mod, only: r4_kind, r8_kind, FMS_PATH_LEN, FMS_FILE_LEN
use yaml_parser_mod
use constants_mod, only: DEG_TO_RAD
use mpp_mod, only : mpp_error, FATAL, WARNING, NOTE, stdout, stdlog, mpp_max
@@ -45,7 +45,7 @@ use mpp_domains_mod, only : domainUG, mpp_pass_SG_to_UG, mpp_get_UG_SG_domain, N
use time_manager_mod, only: time_type, OPERATOR(>), OPERATOR(<)
use fms2_io_mod, only : FmsNetcdfFile_t, open_file, close_file, &
read_data, fms2_io_init, variable_exists, &
- get_mosaic_tile_file, file_exists
+ get_mosaic_tile_file, file_exists, get_instance_filename
use get_grid_version_mod, only: get_grid_version_1, get_grid_version_2
use fms_string_utils_mod, only: string
@@ -591,9 +591,18 @@ subroutine read_table_yaml(data_table)
integer :: nentries, mentries
integer :: i
character(len=50) :: buffer
+ character(len=FMS_FILE_LEN) :: filename !< Name of the expected data_table.yaml
integer :: file_id
- file_id = open_and_parse_file("data_table.yaml")
+  ! If doing an ensemble or nest run, add the filename appendix (ens_XX or nest_XX) to the filename
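+  ! (e.g., filename will be "data_table.ens_01.yaml" for the first ensemble member)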
+ call get_instance_filename("data_table.yaml", filename)
+ if (index(trim(filename), "ens_") .ne. 0) then
+ if (file_exists(filename) .and. file_exists("data_table.yaml")) &
+      call mpp_error(FATAL, "Both data_table.yaml and "//trim(filename)//" exist, pick one!")
+ endif
+
+ file_id = open_and_parse_file(trim(filename))
+
if (file_id==999) then
nentries = 0
else
diff --git a/diag_manager/diag_yaml_format.md b/diag_manager/diag_yaml_format.md
index d9e93c359..b561445fd 100644
--- a/diag_manager/diag_yaml_format.md
+++ b/diag_manager/diag_yaml_format.md
@@ -15,6 +15,7 @@ The purpose of this document is to explain the diag_table yaml format.
- [2.6 Sub_region Section](diag_yaml_format.md#26-sub_region-section)
- [3. More examples](diag_yaml_format.md#3-more-examples)
- [4. Schema](diag_yaml_format.md#4-schema)
+- [5. Ensemble and Nest Support](diag_yaml_format.md#5-ensemble-and-nest-support)
### 1. Converting from legacy ascii diag_table format
@@ -349,3 +350,6 @@ diag_files:
A formal specification of the file format, in the form of a JSON schema, can be
found in the [gfdl_msd_schemas](https://github.com/NOAA-GFDL/gfdl_msd_schemas)
repository on Github.
+
+### 5. Ensemble and Nest Support
+When using nests, it may be desired for a nest to have a different file frequency or number of variables than the parent grid. This may allow users to save disk space and reduce simulation time. In order to support this, FMS allows each nest to have a different diag_table.yaml from the parent grid. For example, if running with 1 nest, FMS will use diag_table.yaml for the parent grid and diag_table.nest_01.yaml for the first nest. Similarly, each ensemble member can have its own diag_table (diag_table.ens_XX.yaml, where XX is the ensemble number). However, for the ensemble case, if both the diag_table.yaml and the diag_table.ens_* files are present, the code will crash as only 1 option is allowed.
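+
+As an illustrative sketch, a run with 1 nest would use the following files, following the naming convention above:
+
+```
+diag_table.yaml          # used by the parent grid
+diag_table.nest_01.yaml  # used by the first nest
+```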
\ No newline at end of file
diff --git a/diag_manager/fms_diag_yaml.F90 b/diag_manager/fms_diag_yaml.F90
index a1c9b0b80..26f631414 100644
--- a/diag_manager/fms_diag_yaml.F90
+++ b/diag_manager/fms_diag_yaml.F90
@@ -45,6 +45,7 @@ module fms_diag_yaml_mod
fms_f2c_string
use platform_mod, only: r4_kind, i4_kind, r8_kind, i8_kind, FMS_FILE_LEN
use fms_mod, only: lowercase
+use fms2_io_mod, only: file_exists, get_instance_filename
implicit none
@@ -381,10 +382,17 @@ subroutine diag_yaml_object_init(diag_subset_output)
!! outputing data at every frequency)
character(len=:), allocatable :: filename!< Diag file name (for error messages)
logical :: is_instantaneous !< .True. if the file is instantaneous (i.e no averaging)
+ character(len=FMS_FILE_LEN) :: yamlfilename !< Name of the expected diag_table.yaml
if (diag_yaml_module_initialized) return
- diag_yaml_id = open_and_parse_file("diag_table.yaml")
+  ! If doing an ensemble or nest run, add the filename appendix (ens_XX or nest_XX) to the filename
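+  ! (e.g., yamlfilename will be "diag_table.ens_01.yaml" for the first ensemble member)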
+ call get_instance_filename("diag_table.yaml", yamlfilename)
+ if (index(trim(yamlfilename), "ens_") .ne. 0) then
+ if (file_exists(yamlfilename) .and. file_exists("diag_table.yaml")) &
+      call mpp_error(FATAL, "Both diag_table.yaml and "//trim(yamlfilename)//" exist, pick one!")
+ endif
+ diag_yaml_id = open_and_parse_file(trim(yamlfilename))
call diag_get_value_from_key(diag_yaml_id, 0, "title", diag_yaml%diag_title)
call get_value_from_key(diag_yaml_id, 0, "base_date", diag_yaml%diag_basedate)
diff --git a/field_manager/field_manager.F90 b/field_manager/field_manager.F90
index 5c4b44294..ba2641719 100644
--- a/field_manager/field_manager.F90
+++ b/field_manager/field_manager.F90
@@ -190,7 +190,7 @@ module field_manager_mod
use fms_mod, only : lowercase, &
write_version_number, &
check_nml_error
-use fms2_io_mod, only: file_exists
+use fms2_io_mod, only: file_exists, get_instance_filename
use platform_mod, only: r4_kind, r8_kind, FMS_PATH_LEN, FMS_FILE_LEN
#ifdef use_yaml
use fm_yaml_mod
@@ -606,18 +606,27 @@ subroutine read_field_table_yaml(nfields, table_name)
logical :: fm_success !< logical for whether fm_change_list was a success
logical :: subparams !< logical whether subparams exist in this iteration
+character(len=FMS_FILE_LEN) :: filename !< Name of the expected field_table.yaml
+
if (.not.PRESENT(table_name)) then
tbl_name = 'field_table.yaml'
else
tbl_name = trim(table_name)
endif
-if (.not. file_exists(trim(tbl_name))) then
+
+call get_instance_filename(tbl_name, filename)
+if (index(trim(filename), "ens_") .ne. 0) then
+ if (file_exists(filename) .and. file_exists(tbl_name)) &
+    call mpp_error(FATAL, "Both "//trim(tbl_name)//" and "//trim(filename)//" exist, pick one!")
+endif
+
+if (.not. file_exists(trim(filename))) then
if(present(nfields)) nfields = 0
return
endif
! Construct my_table object
-call build_fmTable(my_table, trim(tbl_name))
+call build_fmTable(my_table, trim(filename))
do h=1,size(my_table%types)
do i=1,size(my_table%types(h)%models)
diff --git a/fms2_io/fms_io_utils.F90 b/fms2_io/fms_io_utils.F90
index 85b34aa84..605c7d08e 100644
--- a/fms2_io/fms_io_utils.F90
+++ b/fms2_io/fms_io_utils.F90
@@ -824,8 +824,14 @@ subroutine get_instance_filename(name_in,name_out)
if ( i .ne. 0 ) then
name_out = name_in(1:i-1)//'.'//trim(filename_appendix)//name_in(i:length)
else
- !< If .nc is not in the name, add the appendix at the end of the file
- name_out = name_in(1:length) //'.'//trim(filename_appendix)
+ i = index(trim(name_in), ".yaml", back=.true.)
+ if (i .ne. 0) then
+        !< If .yaml is in the filename, add the appendix before it
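+        !< (e.g., "data_table.yaml" with appendix "ens_01" becomes "data_table.ens_01.yaml")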
+ name_out = name_in(1:i-1)//'.'//trim(filename_appendix)//name_in(i:length)
+ else
+        !< If neither .nc nor .yaml is in the name, add the appendix at the end of the file
+ name_out = name_in(1:length) //'.'//trim(filename_appendix)
+ endif
end if
end if
diff --git a/test_fms/data_override/Makefile.am b/test_fms/data_override/Makefile.am
index 087bd91ea..0de57700f 100644
--- a/test_fms/data_override/Makefile.am
+++ b/test_fms/data_override/Makefile.am
@@ -73,11 +73,11 @@ TESTS_ENVIRONMENT= test_input_path="@TEST_INPUT_PATH@" \
# Run the test program.
TESTS = test_data_override2.sh test_data_override_init.sh test_data_override2_mono.sh test_data_override2_ongrid.sh \
- test_data_override2_scalar.sh test_data_override_weights.sh
+ test_data_override2_scalar.sh test_data_override_weights.sh test_data_override_ensembles.sh
# Include these files with the distribution.
EXTRA_DIST = test_data_override2.sh test_data_override_init.sh test_data_override2_mono.sh test_data_override2_ongrid.sh \
- test_data_override2_scalar.sh test_data_override_weights.sh
+ test_data_override2_scalar.sh test_data_override_weights.sh test_data_override_ensembles.sh
# Clean up
CLEANFILES = input.nml *.nc* *.out diag_table data_table data_table.yaml INPUT/* *.dpi *.spi *.dyn *.spl *-files/*
diff --git a/test_fms/data_override/test_data_override_ensembles.sh b/test_fms/data_override/test_data_override_ensembles.sh
new file mode 100755
index 000000000..afcdcd458
--- /dev/null
+++ b/test_fms/data_override/test_data_override_ensembles.sh
@@ -0,0 +1,99 @@
+#!/bin/sh
+
+#***********************************************************************
+#* GNU Lesser General Public License
+#*
+#* This file is part of the GFDL Flexible Modeling System (FMS).
+#*
+#* FMS is free software: you can redistribute it and/or modify it under
+#* the terms of the GNU Lesser General Public License as published by
+#* the Free Software Foundation, either version 3 of the License, or (at
+#* your option) any later version.
+#*
+#* FMS is distributed in the hope that it will be useful, but WITHOUT
+#* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+#* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+#* for more details.
+#*
+#* You should have received a copy of the GNU Lesser General Public
+#* License along with FMS. If not, see <http://www.gnu.org/licenses/>.
+#***********************************************************************
+#
+# Copyright (c) 2019-2021 Ed Hartnett, Uriel Ramirez, Seth Underwood
+
+# Set common test settings.
+. ../test-lib.sh
+
+output_dir
+[ ! -d "INPUT" ] && mkdir -p "INPUT"
+
+cat <<_EOF > data_table.ens_01.yaml
+data_table:
+ - grid_name: OCN
+ fieldname_in_model: runoff
+ override_file:
+ - fieldname_in_file: runoff
+ file_name: INPUT/runoff.daitren.clim.1440x1080.v20180328_ens_01.nc
+ interp_method: none
+ factor: 1.0
+_EOF
+
+cat <<_EOF > data_table.ens_02.yaml
+data_table:
+ - grid_name: OCN
+ fieldname_in_model: runoff
+ override_file:
+ - fieldname_in_file: runoff
+ file_name: INPUT/runoff.daitren.clim.1440x1080.v20180328_ens_02.nc
+ interp_method: none
+ factor: 1.0
+_EOF
+
+cat <<_EOF > input_base.nml
+&data_override_nml
+ use_data_table_yaml = .True.
+/
+
+&test_data_override_ongrid_nml
+ test_case = 5
+ write_only = .False.
+/
+
+&ensemble_nml
+ ensemble_size = 2
+/
+_EOF
+
+#The test only runs with yaml
+if [ -z "$parser_skip" ]; then
+ for KIND in r4 r8
+ do
+ rm -rf INPUT/.
+ sed 's/write_only = .False./write_only = .True./g' input_base.nml > input.nml
+ test_expect_success "Creating input files (${KIND})" '
+ mpirun -n 12 ../test_data_override_ongrid_${KIND}
+ '
+
+ cp input_base.nml input.nml
+ test_expect_success "test_data_override with two ensembles -yaml (${KIND})" '
+ mpirun -n 12 ../test_data_override_ongrid_${KIND}
+ '
+ done
+
+cat <<_EOF > data_table.yaml
+data_table:
+ - grid_name: OCN
+ fieldname_in_model: runoff
+ override_file:
+ - fieldname_in_file: runoff
+ file_name: INPUT/runoff.daitren.clim.1440x1080.v20180328_ens_02.nc
+ interp_method: none
+ factor: 1.0
+_EOF
+
+ test_expect_failure "test_data_override with both data_table.yaml and data_table.ens_xx.yaml files" '
+ mpirun -n 12 ../test_data_override_ongrid_${KIND}
+ '
+  rm -rf INPUT
+fi
+test_done
diff --git a/test_fms/data_override/test_data_override_ongrid.F90 b/test_fms/data_override/test_data_override_ongrid.F90
index 8f1fe43f7..d8e3864ba 100644
--- a/test_fms/data_override/test_data_override_ongrid.F90
+++ b/test_fms/data_override/test_data_override_ongrid.F90
@@ -26,13 +26,15 @@ program test_data_override_ongrid
use mpp_domains_mod, only: mpp_define_domains, mpp_define_io_domain, mpp_get_data_domain, &
mpp_domains_set_stack_size, mpp_get_compute_domain, domain2d
use mpp_mod, only: mpp_init, mpp_exit, mpp_pe, mpp_root_pe, mpp_error, FATAL, &
- input_nml_file, mpp_sync, NOTE
+ input_nml_file, mpp_sync, NOTE, mpp_npes, mpp_get_current_pelist, &
+ mpp_set_current_pelist
use data_override_mod, only: data_override_init, data_override
use fms2_io_mod
use time_manager_mod, only: set_calendar_type, time_type, set_date, NOLEAP
use netcdf, only: nf90_create, nf90_def_dim, nf90_def_var, nf90_enddef, nf90_put_var, &
nf90_close, nf90_put_att, nf90_clobber, nf90_64bit_offset, nf90_char, &
nf90_double, nf90_unlimited
+use ensemble_manager_mod, only: get_ensemble_size, ensemble_manager_init
use fms_mod, only: string, fms_init, fms_end
implicit none
@@ -52,7 +54,12 @@ program test_data_override_ongrid
integer, parameter :: bilinear = 2
integer, parameter :: scalar = 3
integer, parameter :: weight_file = 4
+integer, parameter :: ensemble_case = 5
integer :: test_case = ongrid
+integer :: npes
+integer, allocatable :: pelist(:)
+integer, allocatable :: pelist_ens(:)
+integer :: ensemble_id
logical :: write_only=.false. !< True if creating the input files only
namelist / test_data_override_ongrid_nml / nhalox, nhaloy, test_case, nlon, nlat, layout, write_only
@@ -70,12 +77,27 @@ program test_data_override_ongrid
call set_calendar_type(NOLEAP)
+npes = mpp_npes()
+allocate(pelist(npes))
+call mpp_get_current_pelist(pelist)
+
+select case (test_case)
+case (ensemble_case)
+ call set_up_ensemble_case()
+end select
+
!< Create a domain nlonXnlat with mask
call mpp_domains_set_stack_size(17280000)
call mpp_define_domains( (/1,nlon,1,nlat/), layout, Domain, xhalo=nhalox, yhalo=nhaloy, name='test_data_override_emc')
call mpp_define_io_domain(Domain, (/1,1/))
call mpp_get_data_domain(Domain, is, ie, js, je)
+select case (test_case)
+case (ensemble_case)
+ ! Go back to the full pelist
+ call mpp_set_current_pelist(pelist)
+end select
+
if (write_only) then
select case (test_case)
case (ongrid)
@@ -86,12 +108,20 @@ program test_data_override_ongrid
call generate_scalar_input_file ()
case (weight_file)
call generate_weight_input_file ()
+ case (ensemble_case)
+ call generate_ensemble_input_file()
end select
call mpp_sync()
call mpp_error(NOTE, "Finished creating INPUT Files")
else
+ select case (test_case)
+ case (ensemble_case)
+ !< Go back to the ensemble pelist
+ call mpp_set_current_pelist(pelist_ens)
+ end select
+
!< Initiliaze data_override
call data_override_init(Ocean_domain_in=Domain, mode=lkind)
@@ -104,6 +134,9 @@ program test_data_override_ongrid
call scalar_test()
case (weight_file)
call weight_file_test()
+ case (ensemble_case)
+ call ensemble_test()
+ call mpp_set_current_pelist(pelist)
end select
endif
@@ -218,17 +251,29 @@ subroutine create_ocean_hgrid_file()
endif
end subroutine create_ocean_hgrid_file
-subroutine create_ongrid_data_file()
+subroutine create_ongrid_data_file(is_ensemble)
+ logical, intent(in), optional :: is_ensemble
type(FmsNetcdfFile_t) :: fileobj
character(len=10) :: dimnames(3)
real(lkind), allocatable, dimension(:,:,:) :: runoff_in
real(lkind), allocatable, dimension(:) :: time_data
+  integer :: offset !< Offset added to the data values (the ensemble id for ensemble runs, 0 otherwise)
+  character(len=256) :: appendix !< Filename appendix (e.g. "_ens_01"); empty when not running as an ensemble
+
integer :: i
+ offset = 0
+ appendix = ""
+ if (present(is_ensemble)) then
+ offset = ensemble_id
+ call get_filename_appendix(appendix)
+ appendix = "_"//trim(appendix)
+ endif
+
allocate(runoff_in(nlon, nlat, 10))
allocate(time_data(10))
do i = 1, 10
- runoff_in(:,:,i) = real(i, lkind)
+ runoff_in(:,:,i) = real(i+offset, lkind)
enddo
time_data = (/1., 2., 3., 5., 6., 7., 8., 9., 10., 11./)
@@ -236,7 +281,7 @@ subroutine create_ongrid_data_file()
dimnames(2) = 'j'
dimnames(3) = 'time'
- if (open_file(fileobj, 'INPUT/runoff.daitren.clim.1440x1080.v20180328.nc', 'overwrite')) then
+ if (open_file(fileobj, 'INPUT/runoff.daitren.clim.1440x1080.v20180328'//trim(appendix)//'.nc', 'overwrite')) then
call register_axis(fileobj, "i", nlon)
call register_axis(fileobj, "j", nlat)
call register_axis(fileobj, "time", unlimited)
@@ -609,4 +654,84 @@ subroutine scalar_test()
end subroutine scalar_test
+subroutine set_up_ensemble_case()
+ integer :: ens_siz(6)
+ character(len=10) :: text
+
+ if (npes .ne. 12) &
+ call mpp_error(FATAL, "This test requires 12 pes to run")
+
+ if (layout(1)*layout(2) .ne. 6) &
+    call mpp_error(FATAL, "The layout must be set so that layout(1)*layout(2) equals 6")
+
+ call ensemble_manager_init
+ ens_siz = get_ensemble_size()
+ if (ens_siz(1) .ne. 2) &
+ call mpp_error(FATAL, "This test requires 2 ensembles")
+
+ if (mpp_pe() < 6) then
+ !PEs 0-5 are the first ensemble
+ ensemble_id = 1
+ allocate(pelist_ens(npes/ens_siz(1)))
+ pelist_ens = pelist(1:6)
+ call mpp_set_current_pelist(pelist_ens)
+ else
+ !PEs 6-11 are the second ensemble
+ ensemble_id = 2
+ allocate(pelist_ens(npes/ens_siz(1)))
+ pelist_ens = pelist(7:)
+ call mpp_set_current_pelist(pelist_ens)
+ endif
+
+ write( text,'(a,i2.2)' ) 'ens_', ensemble_id
+ call set_filename_appendix(trim(text))
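+  ! With the appendix set, each member will read its own data_table.ens_XX.yaml and *_ens_XX.nc input file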
+
+ if (mpp_pe() .eq. mpp_root_pe()) &
+ print *, "ensemble_id:", ensemble_id, ":: ", pelist_ens
+end subroutine
+
+subroutine generate_ensemble_input_file()
+ if (mpp_pe() .eq. mpp_root_pe()) then
+ call create_grid_spec_file ()
+ call create_ocean_mosaic_file()
+ call create_ocean_hgrid_file()
+ endif
+
+ !< Go back to the ensemble pelist so that each root pe can write its own input file
+ call mpp_set_current_pelist(pelist_ens)
+ if (mpp_pe() .eq. mpp_root_pe()) then
+ call create_ongrid_data_file(is_ensemble=.true.)
+ endif
+ call mpp_set_current_pelist(pelist)
+end subroutine
+
+subroutine ensemble_test()
+ real(lkind) :: expected_result !< Expected result from data_override
+ type(time_type) :: Time !< Time
+ real(lkind), allocatable, dimension(:,:) :: runoff !< Data to be written
+
+ allocate(runoff(is:ie,js:je))
+
+ runoff = 999._lkind
+ !< Run it when time=3
+ Time = set_date(1,1,4,0,0,0)
+ call data_override('OCN','runoff',runoff, Time)
+ !< Because you are getting the data when time=3, and this is an "ongrid" case, the expected result is just
+ !! equal to the data at time=3, which is 3+ensemble_id.
+ expected_result = 3._lkind + real(ensemble_id,kind=lkind)
+ call compare_data(Domain, runoff, expected_result)
+
+ !< Run it when time=4
+ runoff = 999._lkind
+ Time = set_date(1,1,5,0,0,0)
+ call data_override('OCN','runoff',runoff, Time)
+  !< You are getting the data when time=4; the data at time=3 is 3+ensemble_id and at time=5 is 4+ensemble_id,
+  !! so the expected result is the average of the two (because this is an "ongrid" case and there
+  !! is no horizontal interpolation).
+ expected_result = (3._lkind + real(ensemble_id,kind=lkind) + 4._lkind + real(ensemble_id,kind=lkind)) / 2._lkind
+ call compare_data(Domain, runoff, expected_result)
+
+ deallocate(runoff)
+end subroutine ensemble_test
+
end program test_data_override_ongrid
diff --git a/test_fms/diag_manager/Makefile.am b/test_fms/diag_manager/Makefile.am
index a224eb245..2d7d6440a 100644
--- a/test_fms/diag_manager/Makefile.am
+++ b/test_fms/diag_manager/Makefile.am
@@ -34,7 +34,7 @@ check_PROGRAMS = test_diag_manager test_diag_manager_time \
check_time_min check_time_max check_time_sum check_time_avg test_diag_diurnal check_time_diurnal \
check_time_pow check_time_rms check_subregional test_cell_measures test_var_masks \
check_var_masks test_multiple_send_data test_diag_out_yaml test_output_every_freq \
- test_dm_weights test_prepend_date
+ test_dm_weights test_prepend_date test_ens_runs
# This is the source code for the test.
test_output_every_freq_SOURCES = test_output_every_freq.F90
@@ -65,6 +65,7 @@ test_var_masks_SOURCES = test_var_masks.F90
check_var_masks_SOURCES = check_var_masks.F90
test_multiple_send_data_SOURCES = test_multiple_send_data.F90
test_prepend_date_SOURCES = test_prepend_date.F90
+test_ens_runs_SOURCES = test_ens_runs.F90
TEST_EXTENSIONS = .sh
SH_LOG_DRIVER = env AM_TAP_AWK='$(AWK)' $(SHELL) \
@@ -74,7 +75,7 @@ SH_LOG_DRIVER = env AM_TAP_AWK='$(AWK)' $(SHELL) \
TESTS = test_diag_manager2.sh test_time_none.sh test_time_min.sh test_time_max.sh test_time_sum.sh \
test_time_avg.sh test_time_pow.sh test_time_rms.sh test_time_diurnal.sh test_cell_measures.sh \
test_subregional.sh test_var_masks.sh test_multiple_send_data.sh test_output_every_freq.sh \
- test_dm_weights.sh test_flush_nc_file.sh test_prepend_date.sh
+ test_dm_weights.sh test_flush_nc_file.sh test_prepend_date.sh test_ens_runs.sh
testing_utils.mod: testing_utils.$(OBJEXT)
@@ -82,7 +83,8 @@ testing_utils.mod: testing_utils.$(OBJEXT)
EXTRA_DIST = test_diag_manager2.sh check_crashes.sh test_time_none.sh test_time_min.sh test_time_max.sh \
test_time_sum.sh test_time_avg.sh test_time_pow.sh test_time_rms.sh test_time_diurnal.sh \
test_cell_measures.sh test_subregional.sh test_var_masks.sh test_multiple_send_data.sh \
- test_flush_nc_file.sh test_dm_weights.sh test_output_every_freq.sh test_prepend_date.sh
+ test_flush_nc_file.sh test_dm_weights.sh test_output_every_freq.sh test_prepend_date.sh \
+ test_ens_runs.sh
if USING_YAML
skipflag=""
diff --git a/test_fms/diag_manager/test_ens_runs.F90 b/test_fms/diag_manager/test_ens_runs.F90
new file mode 100644
index 000000000..621016430
--- /dev/null
+++ b/test_fms/diag_manager/test_ens_runs.F90
@@ -0,0 +1,128 @@
+!***********************************************************************
+!* GNU Lesser General Public License
+!*
+!* This file is part of the GFDL Flexible Modeling System (FMS).
+!*
+!* FMS is free software: you can redistribute it and/or modify it under
+!* the terms of the GNU Lesser General Public License as published by
+!* the Free Software Foundation, either version 3 of the License, or (at
+!* your option) any later version.
+!*
+!* FMS is distributed in the hope that it will be useful, but WITHOUT
+!* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+!* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+!* for more details.
+!*
+!* You should have received a copy of the GNU Lesser General Public
+!* License along with FMS. If not, see <http://www.gnu.org/licenses/>.
+!***********************************************************************
+
+!> @brief This program tests the diag manager when each ensemble member uses its own diag_table.ens_XX.yaml
+program test_ens_runs
+
+ use fms_mod, only: fms_init, fms_end, string
+ use diag_manager_mod, only: diag_axis_init, send_data, diag_send_complete, diag_manager_set_time_end, &
+ register_diag_field, diag_manager_init, diag_manager_end, register_static_field, &
+ diag_axis_init
+ use time_manager_mod, only: time_type, operator(+), JULIAN, set_time, set_calendar_type, set_date
+ use mpp_mod, only: FATAL, mpp_error, mpp_npes, mpp_pe, mpp_get_current_pelist, mpp_set_current_pelist
+ use fms2_io_mod, only: FmsNetcdfFile_t, open_file, close_file, read_data, get_dimension_size, &
+ set_filename_appendix, get_instance_filename
+ use ensemble_manager_mod, only: get_ensemble_size, ensemble_manager_init
+
+ implicit none
+
+ integer :: id_var0 !< diag field ids
+ integer :: id_axis1 !< Id for axis
+ logical :: used !< for send_data calls
+ integer :: ntimes = 48 !< Number of time steps
+ real :: vdata !< Buffer to store the data
+ type(time_type) :: Time !< "Model" time
+ type(time_type) :: Time_step !< Time step for the "simulation"
+ integer :: i !< For do loops
+ integer :: npes !< Number of pes in the current pelist
+ integer, allocatable :: pelist(:) !< Full pelist
+ integer :: ensemble_id !< The ensemble id
+ integer :: ens_siz(6) !< The size of the ensemble
+ character(len=10) :: text !< The filename appendix
+  integer :: expected_ntimes !< Expected number of time levels in the output file
+
+ call fms_init
+ call ensemble_manager_init
+ npes = mpp_npes()
+ if (npes .ne. 2) &
+ call mpp_error(FATAL, "This test requires two pes to run")
+
+ allocate(pelist(npes))
+ call mpp_get_current_pelist(pelist)
+
+ ens_siz = get_ensemble_size()
+ if (ens_siz(1) .ne. 2) &
+ call mpp_error(FATAL, "This test requires 2 ensembles")
+
+ if (mpp_pe() < 1) then
+ !< PE 0 is the first ensemble
+ ensemble_id = 1
+ call mpp_set_current_pelist((/0/))
+ expected_ntimes = 48
+ else
+ ensemble_id = 2
+ call mpp_set_current_pelist((/1/))
+ expected_ntimes = 24
+ endif
+
+ write( text,'(a,i2.2)' ) 'ens_', ensemble_id
+ call set_filename_appendix(trim(text))
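+  ! With the appendix set, each member reads its own diag_table.ens_XX.yaml and writes test_ens.ens_XX.nc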
+
+ call set_calendar_type(JULIAN)
+ call diag_manager_init
+
+ Time = set_date(2,1,1,0,0,0)
+ Time_step = set_time (3600,0) !< 1 hour
+ call diag_manager_set_time_end(set_date(2,1,3,0,0,0))
+
+ id_var0 = register_diag_field ('ocn_mod', 'var0', Time)
+
+ do i = 1, ntimes
+ Time = Time + Time_step
+ vdata = real(i)
+
+ used = send_data(id_var0, vdata, Time)
+ call diag_send_complete(Time_step)
+ enddo
+
+ call diag_manager_end(Time)
+
+ call check_output()
+ call fms_end
+
+ contains
+
+ !< @brief Check the diag manager output
+ subroutine check_output()
+ type(FmsNetcdfFile_t) :: fileobj !< Fms2io fileobj
+ integer :: var_size !< Size of the variable reading
+ real, allocatable :: var_data(:) !< Buffer to read variable data to
+ integer :: j !< For looping
+ character(len=255) :: filename !< Name of the diag file
+
+ call get_instance_filename("test_ens.nc", filename)
+ if (.not. open_file(fileobj, filename, "read")) &
+ call mpp_error(FATAL, "Error opening file:"//trim(filename)//" to read")
+
+ call get_dimension_size(fileobj, "time", var_size)
+ if (var_size .ne. expected_ntimes) call mpp_error(FATAL, "The dimension of time in the file:"//&
+ "test_ens is not the correct size!")
+ allocate(var_data(var_size))
+ var_data = -999.99
+
+ call read_data(fileobj, "var0", var_data)
+ do j = 1, var_size
+ if (var_data(j) .ne. real(j * ensemble_id))&
+        call mpp_error(FATAL, "The variable data for var0 at time level:"//&
+ string(j)//" is not the correct value!")
+ enddo
+
+ call close_file(fileobj)
+ end subroutine check_output
+end program test_ens_runs
diff --git a/test_fms/diag_manager/test_ens_runs.sh b/test_fms/diag_manager/test_ens_runs.sh
new file mode 100755
index 000000000..b2e262b69
--- /dev/null
+++ b/test_fms/diag_manager/test_ens_runs.sh
@@ -0,0 +1,97 @@
+#!/bin/sh
+
+#***********************************************************************
+#* GNU Lesser General Public License
+#*
+#* This file is part of the GFDL Flexible Modeling System (FMS).
+#*
+#* FMS is free software: you can redistribute it and/or modify it under
+#* the terms of the GNU Lesser General Public License as published by
+#* the Free Software Foundation, either version 3 of the License, or (at
+#* your option) any later version.
+#*
+#* FMS is distributed in the hope that it will be useful, but WITHOUT
+#* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+#* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+#* for more details.
+#*
+#* You should have received a copy of the GNU Lesser General Public
+#* License along with FMS. If not, see <http://www.gnu.org/licenses/>.
+#***********************************************************************
+
+# Copyright (c) 2019-2020 Ed Hartnett, Seth Underwood
+
+# Set common test settings.
+. ../test-lib.sh
+
+if [ -z "${skipflag}" ]; then
+# create and enter directory for in/output files
+output_dir
+
+cat <<_EOF > diag_table.ens_01.yaml
+title: test_diag_manager_01
+base_date: 2 1 1 0 0 0
+diag_files:
+- file_name: test_ens
+ time_units: days
+ unlimdim: time
+ freq: 1 hours
+ varlist:
+ - module: ocn_mod
+ var_name: var0
+ reduction: none
+ kind: r8
+_EOF
+
+cat <<_EOF > diag_table.ens_02.yaml
+title: test_diag_manager_01
+base_date: 2 1 1 0 0 0
+diag_files:
+- file_name: test_ens
+ time_units: days
+ unlimdim: time
+ freq: 2 hours
+ varlist:
+ - module: ocn_mod
+ var_name: var0
+ reduction: none
+ kind: r8
+_EOF
+
+cat <<_EOF > input.nml
+&diag_manager_nml
+ use_modern_diag = .True.
+/
+
+&ensemble_nml
+ ensemble_size = 2
+/
+_EOF
+
+my_test_count=1
+test_expect_success "Running diag_manager with 2 ensembles (test $my_test_count)" '
+ mpirun -n 2 ../test_ens_runs
+'
+
+cat <<_EOF > diag_table.yaml
+title: test_diag_manager_01
+base_date: 2 1 1 0 0 0
+diag_files:
+- file_name: test_0days
+ time_units: days
+ unlimdim: time
+ freq: 0 days
+ varlist:
+ - module: ocn_mod
+ var_name: var0
+ reduction: none
+ kind: r8
+_EOF
+
+my_test_count=`expr $my_test_count + 1`
+test_expect_failure "Running diag_manager with both diag_table.yaml and diag_table.ens_xx.yaml files present (test $my_test_count)" '
+ mpirun -n 2 ../test_ens_runs
+'
+
+fi
+test_done
diff --git a/test_fms/field_manager/test_field_manager2.sh b/test_fms/field_manager/test_field_manager2.sh
index 248570159..313c830a7 100755
--- a/test_fms/field_manager/test_field_manager2.sh
+++ b/test_fms/field_manager/test_field_manager2.sh
@@ -106,6 +106,48 @@ else
test_expect_success "field table read with use_field_table.yaml = .true." 'mpirun -n 1 ./test_field_table_read'
test_expect_success "field manager functional r4 with yaml table" 'mpirun -n 2 ./test_field_manager_r4'
test_expect_success "field manager functional r8 with yaml table" 'mpirun -n 2 ./test_field_manager_r8'
+
+ cat <<_EOF > field_table.ens_01.yaml
+field_table:
+- field_type: tracer
+ modlist:
+ - model_type: atmos_mod
+ varlist:
+ - variable: radon
+ - variable: radon2
+ - variable: radon3
+ longname: bad radon!
+_EOF
+
+ cat <<_EOF > field_table.ens_02.yaml
+field_table:
+- field_type: tracer
+ modlist:
+ - model_type: atmos_mod
+ varlist:
+ - variable: radon
+ - variable: radon2
+ - variable: radon3
+ longname: bad radon!
+ - variable: radon4
+ longname: REALLY bad radon!
+_EOF
+cat <<_EOF > input.nml
+&field_manager_nml
+ use_field_table_yaml = .true.
+/
+&test_field_table_read_nml
+ test_case = 1
+/
+&ensemble_nml
+ ensemble_size = 2
+/
+_EOF
+ test_expect_failure "field manager test with both field_table.yaml and field_table.ens_XX.yaml files present" 'mpirun -n 2 ./test_field_table_read'
+
+ rm -rf field_table.yaml
+
+ test_expect_success "field manager test with 2 ensembles" 'mpirun -n 2 ./test_field_table_read'
fi
test_done
diff --git a/test_fms/field_manager/test_field_table_read.F90 b/test_fms/field_manager/test_field_table_read.F90
index ba9b125a4..bb46256cb 100644
--- a/test_fms/field_manager/test_field_table_read.F90
+++ b/test_fms/field_manager/test_field_table_read.F90
@@ -36,15 +36,74 @@ program test_field_table_read
use field_manager_mod, only: field_manager_init
use fms_mod, only: fms_init, fms_end
-use mpp_mod, only : mpp_pe, mpp_root_pe, mpp_error, NOTE, FATAL
+use fms2_io_mod, only: set_filename_appendix
+use ensemble_manager_mod, only: get_ensemble_size, ensemble_manager_init
+use mpp_mod, only : mpp_pe, mpp_root_pe, mpp_error, NOTE, FATAL, input_nml_file, mpp_npes, &
+ mpp_set_current_pelist, mpp_get_current_pelist
implicit none
integer :: nfields
+integer :: nfields_expected
+integer :: io_status
+integer :: npes
+integer, allocatable :: pelist(:)
+integer :: ens_siz(6)
+integer :: ensemble_id
+character(len=10) :: text
+integer, parameter :: default_test = 0
+integer, parameter :: ensemble_test = 1
+
+! namelist parameters
+integer :: test_case = default_test
+
+namelist / test_field_table_read_nml / test_case
call fms_init
+read (input_nml_file, test_field_table_read_nml, iostat=io_status)
+if (io_status > 0) call mpp_error(FATAL,'=>test_field_table_read: Error reading input.nml')
+
+npes = mpp_npes()
+allocate(pelist(npes))
+call mpp_get_current_pelist(pelist)
+
+nfields_expected = 4
+select case (test_case)
+case (ensemble_test)
+ if (npes .ne. 2) &
+ call mpp_error(FATAL, "test_field_table_read:: this test requires 2 PEs!")
+
+ call ensemble_manager_init
+ ens_siz = get_ensemble_size()
+ if (ens_siz(1) .ne. 2) &
+ call mpp_error(FATAL, "This test requires 2 ensembles")
+
+ if (mpp_pe() .eq. 0) then
+    !PE 0 is the first ensemble
+ ensemble_id = 1
+ call mpp_set_current_pelist((/0/))
+ nfields_expected = 3
+ else
+    !PE 1 is the second ensemble
+ ensemble_id = 2
+ call mpp_set_current_pelist((/1/))
+ nfields_expected = 4
+ endif
+
+ write( text,'(a,i2.2)' ) 'ens_', ensemble_id
+ call set_filename_appendix(trim(text))
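+  ! With the appendix set, field_manager_init will read this member's field_table.ens_XX.yaml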
+
+end select
+
call field_manager_init(nfields)
-if (nfields .ne. 4) &
+print *, nfields
+if (nfields .ne. nfields_expected) &
call mpp_error(FATAL, "test_field_table_read:: The number fields returned is not the expected result")
+
+select case (test_case)
+case (ensemble_test)
+ call mpp_set_current_pelist(pelist)
+end select
+
call fms_end
end program test_field_table_read