diff --git a/src/troute-network/troute/hyfeature_network_utilities.py b/src/troute-network/troute/hyfeature_network_utilities.py
index e1fc65860..0eb102d61 100644
--- a/src/troute-network/troute/hyfeature_network_utilities.py
+++ b/src/troute-network/troute/hyfeature_network_utilities.py
@@ -11,6 +11,7 @@ from joblib import delayed, Parallel
 import pyarrow as pa
 import pyarrow.parquet as pq
+import xarray as xr
 
 import troute.nhd_io as nhd_io
 
@@ -23,11 +24,12 @@ def build_forcing_sets(
     t0
 ):
 
-    run_sets           = forcing_parameters.get("qlat_forcing_sets", None)
-    nexus_input_folder = forcing_parameters.get("nexus_input_folder", None)
-    nts                = forcing_parameters.get("nts", None)
-    max_loop_size      = forcing_parameters.get("max_loop_size", 12)
-    dt                 = forcing_parameters.get("dt", None)
+    run_sets                         = forcing_parameters.get("qlat_forcing_sets", None)
+    nexus_input_folder               = forcing_parameters.get("nexus_input_folder", None)
+    downstream_boundary_input_folder = forcing_parameters.get("downstream_boundary_input_folder", None)
+    nts                              = forcing_parameters.get("nts", None)
+    max_loop_size                    = forcing_parameters.get("max_loop_size", 12)
+    dt                               = forcing_parameters.get("dt", None)
 
     try:
         nexus_input_folder = pathlib.Path(nexus_input_folder)
@@ -38,7 +40,8 @@ def build_forcing_sets(
         raise AssertionError("Aborting simulation because the nexus_input_folder:", qlat_input_folder,"does not exist. Please check the the nexus_input_folder variable is correctly entered in the .yaml control file") from None
 
     forcing_glob_filter = forcing_parameters.get("nexus_file_pattern_filter", "*.NEXOUT")
-
+    downstream_boundary_glob_filter = forcing_parameters.get("downstream_boundary_file_pattern_filter", "*SCHISM.nc")
+
     if forcing_glob_filter=="nex-*":
         print("Reformating qlat nexus files as hourly binary files...")
         binary_folder = forcing_parameters.get('binary_nexus_file_folder', None)
@@ -161,6 +164,64 @@ def build_forcing_sets(
             k += max_loop_size
             j += 1
 
+    # create downstream boundary forcing file list
+    if downstream_boundary_input_folder:
+        # get the first and second files from an ordered list of all forcing files
+        downstream_boundary_input_folder = pathlib.Path(downstream_boundary_input_folder)
+        all_files = sorted(downstream_boundary_input_folder.glob(downstream_boundary_glob_filter))
+        first_file = all_files[0]
+        second_file = all_files[1]
+
+        # deduce the time interval of the forcing data from the output timestamps of the first two files
+        df = read_file(first_file)
+        t1_str = pd.to_datetime(df.time.iloc[0]).strftime("%Y-%m-%d_%H:%M:%S")
+        t1 = datetime.strptime(t1_str, "%Y-%m-%d_%H:%M:%S")
+        df = read_file(second_file)
+        t2_str = pd.to_datetime(df.time.iloc[0]).strftime("%Y-%m-%d_%H:%M:%S")
+        t2 = datetime.strptime(t2_str, "%Y-%m-%d_%H:%M:%S")
+        dt_downstream_boundary_timedelta = t2 - t1
+        dt_downstream_boundary = dt_downstream_boundary_timedelta.seconds
+
+        bts_subdivisions = dt_downstream_boundary / dt
+
+        # the number of files required for the simulation
+        # For example, for a 4 hr simulation with a 2 hr max_loop_size, two sets of boundary data are
+        # needed: one at (t0, t0+1hr, t0+2hr) and the other at (t0+2hr, t0+3hr, t0+4hr).
+        nfiles = int(np.ceil(nts / bts_subdivisions)) + 1
+
+        # list of downstream boundary file datetimes
+        datetime_list = [t0 + dt_downstream_boundary_timedelta * (n) for n in range(nfiles)]
+        datetime_list_str = [datetime.strftime(d, '%Y%m%d%H%M') for d in datetime_list]
+
+        # list of downstream boundary files
+        downstream_boundary_filename_list = [d_str + downstream_boundary_glob_filter[1:] for d_str in datetime_list_str]
+
+        # check that all forcing files exist
+        for f in downstream_boundary_filename_list:
+            try:
+                J = pathlib.Path(downstream_boundary_input_folder.joinpath(f))
+                assert J.is_file() == True
+            except AssertionError:
+                raise AssertionError("Aborting simulation because downstream boundary file", J, "cannot be found.") from None
+
+        # append the downstream boundary files to the run sets built above
+        k = 0
+        j = 0
+        nts_accum = 0
+        nts_last = 0
+
+        while k < len(downstream_boundary_filename_list)-1:
+            if k + max_loop_size < len(downstream_boundary_filename_list):
+                run_sets[j]['downstream_boundary_files'] = downstream_boundary_filename_list[k:k + max_loop_size+1]
+            else:
+                run_sets[j]['downstream_boundary_files'] = downstream_boundary_filename_list[k:]
+            k += max_loop_size
+            j += 1
+
     return run_sets
 
 def build_qlateral_array(
@@ -296,5 +357,8 @@ def read_file(file_name):
     elif extension=='.parquet':
         df = pq.read_table(file_name).to_pandas().reset_index()
         df.index.name = None
+    elif extension=='.nc':
+        df = xr.open_dataset(file_name)
+        df = df.to_dataframe()
 
     return df
\ No newline at end of file
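The block above reuses the qlat run sets and attaches a `downstream_boundary_files` list to each one. A minimal, self-contained sketch of the arithmetic, with hypothetical values chosen to match the 4 hr / 2 hr example in the comment (hourly SCHISM output, `dt = 300` s, `nts = 48`, `max_loop_size = 2`); the real function writes into run sets already created from the nexus files, whereas this sketch appends fresh dictionaries just to stay standalone:

```python
import numpy as np

# hypothetical values mirroring the example in the comment above
dt = 300                       # routing time step [s]
nts = 48                       # number of routing time steps (4 hr total)
max_loop_size = 2              # [hr] boundary files per loop
dt_downstream_boundary = 3600  # spacing deduced from the first two SCHISM files [s]

bts_subdivisions = dt_downstream_boundary / dt     # 12 routing steps per boundary interval
nfiles = int(np.ceil(nts / bts_subdivisions)) + 1  # 5 files: t0, t0+1hr, ..., t0+4hr

files = [f"file_{n}" for n in range(nfiles)]
run_sets = []
k, j = 0, 0
while k < len(files) - 1:
    run_sets.append({})  # the real code indexes into pre-existing run_sets[j]
    if k + max_loop_size < len(files):
        run_sets[j]['downstream_boundary_files'] = files[k:k + max_loop_size + 1]
    else:
        run_sets[j]['downstream_boundary_files'] = files[k:]
    k += max_loop_size
    j += 1

print(run_sets)
# [{'downstream_boundary_files': ['file_0', 'file_1', 'file_2']},
#  {'downstream_boundary_files': ['file_2', 'file_3', 'file_4']}]
```

Note that each loop's last boundary file is also the first file of the next loop (t0+2hr appears in both sets), which is what the overlapping `k:k + max_loop_size + 1` slice produces.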
diff --git a/src/troute-network/troute/hyfeature_preprocess.py b/src/troute-network/troute/hyfeature_preprocess.py
index 446f356df..a5927c357 100644
--- a/src/troute-network/troute/hyfeature_preprocess.py
+++ b/src/troute-network/troute/hyfeature_preprocess.py
@@ -694,30 +694,31 @@ def hyfeature_forcing(
     """
 
     # Unpack user-specified forcing parameters
-    dt                           = forcing_parameters.get("dt", None)
-    qts_subdivisions             = forcing_parameters.get("qts_subdivisions", None)
-    nexus_input_folder           = forcing_parameters.get("nexus_input_folder", None)
-    qlat_file_index_col          = forcing_parameters.get("qlat_file_index_col", "feature_id")
-    qlat_file_value_col          = forcing_parameters.get("qlat_file_value_col", "q_lateral")
-    qlat_file_gw_bucket_flux_col = forcing_parameters.get("qlat_file_gw_bucket_flux_col", "qBucket")
-    qlat_file_terrain_runoff_col = forcing_parameters.get("qlat_file_terrain_runoff_col", "qSfcLatRunoff")
+    dt                               = forcing_parameters.get("dt", None)
+    qts_subdivisions                 = forcing_parameters.get("qts_subdivisions", None)
+    nexus_input_folder               = forcing_parameters.get("nexus_input_folder", None)
+    qlat_file_index_col              = forcing_parameters.get("qlat_file_index_col", "feature_id")
+    qlat_file_value_col              = forcing_parameters.get("qlat_file_value_col", "q_lateral")
+    qlat_file_gw_bucket_flux_col     = forcing_parameters.get("qlat_file_gw_bucket_flux_col", "qBucket")
+    qlat_file_terrain_runoff_col     = forcing_parameters.get("qlat_file_terrain_runoff_col", "qSfcLatRunoff")
+    downstream_boundary_input_folder = forcing_parameters.get("downstream_boundary_input_folder", None)
 
     # TODO: find a better way to deal with these defaults and overrides.
- run["t0"] = run.get("t0", t0) - run["nts"] = run.get("nts") - run["dt"] = run.get("dt", dt) - run["qts_subdivisions"] = run.get("qts_subdivisions", qts_subdivisions) - run["nexus_input_folder"] = run.get("nexus_input_folder", nexus_input_folder) - run["qlat_file_index_col"] = run.get("qlat_file_index_col", qlat_file_index_col) - run["qlat_file_value_col"] = run.get("qlat_file_value_col", qlat_file_value_col) - run["qlat_file_gw_bucket_flux_col"] = run.get("qlat_file_gw_bucket_flux_col", qlat_file_gw_bucket_flux_col) - run["qlat_file_terrain_runoff_col"] = run.get("qlat_file_terrain_runoff_col", qlat_file_terrain_runoff_col) + run["t0"] = run.get("t0", t0) + run["nts"] = run.get("nts") + run["dt"] = run.get("dt", dt) + run["qts_subdivisions"] = run.get("qts_subdivisions", qts_subdivisions) + run["nexus_input_folder"] = run.get("nexus_input_folder", nexus_input_folder) + run["qlat_file_index_col"] = run.get("qlat_file_index_col", qlat_file_index_col) + run["qlat_file_value_col"] = run.get("qlat_file_value_col", qlat_file_value_col) + run["qlat_file_gw_bucket_flux_col"] = run.get("qlat_file_gw_bucket_flux_col", qlat_file_gw_bucket_flux_col) + run["qlat_file_terrain_runoff_col"] = run.get("qlat_file_terrain_runoff_col", qlat_file_terrain_runoff_col) + run["downstream_boundary_input_folder"] = run.get("downstream_boundary_input_folder", downstream_boundary_input_folder) #--------------------------------------------------------------------------- # Assemble lateral inflow data #--------------------------------------------------------------------------- - start_time = time.time() LOG.info("Creating a DataFrame of lateral inflow forcings ...") @@ -741,7 +742,8 @@ def hyfeature_forcing( # Assemble coastal coupling data [WIP] #--------------------------------------------------------------------- # Run if coastal_boundary_depth_df has not already been created: - if coastal_boundary_depth_df.empty: + #if coastal_boundary_depth_df.empty: + if 1==1: coastal_boundary_elev_files = forcing_parameters.get('coastal_boundary_input_file', None) coastal_boundary_domain_files = hybrid_parameters.get('coastal_boundary_domain', None) @@ -750,11 +752,20 @@ def hyfeature_forcing( LOG.info("creating coastal dataframe ...") coastal_boundary_domain = nhd_io.read_coastal_boundary_domain(coastal_boundary_domain_files) + + # create dataframe for hourly schism data + coastal_boundary_depth_df = nhd_io.build_coastal_dataframe( + run, + coastal_boundary_domain, + ) + + # create dataframe for multi hourly schism data + ''' coastal_boundary_depth_df = nhd_io.build_coastal_ncdf_dataframe( coastal_boundary_elev_files, coastal_boundary_domain, ) - + ''' LOG.debug( "coastal boundary elevation observation DataFrame creation complete in %s seconds." 
\ % (time.time() - start_time) diff --git a/src/troute-network/troute/nhd_io.py b/src/troute-network/troute/nhd_io.py index 3515cd2d6..e6d04493c 100644 --- a/src/troute-network/troute/nhd_io.py +++ b/src/troute-network/troute/nhd_io.py @@ -1563,11 +1563,12 @@ def build_coastal_dataframe(coastal_boundary_elev): ) return coastal_df - +''' def build_coastal_ncdf_dataframe(coastal_ncdf): with xr.open_dataset(coastal_ncdf) as ds: coastal_ncdf_df = ds[["elev", "depth"]] return coastal_ncdf_df.to_dataframe() +''' def build_coastal_ncdf_dataframe( coastal_files, @@ -1594,7 +1595,7 @@ def build_coastal_ncdf_dataframe( tfin = start_date + dt_timeslice*len(timesteps) timestamps = pd.date_range(start_date, tfin, freq=dt_timeslice) timestamps = timestamps.strftime('%Y-%m-%d %H:%M:%S') - + import pdb; pdb.set_trace() # create a dataframe of water depth at coastal domain nodes timeslice_schism_list=[] for t in range(0, len(timesteps)+1): @@ -1619,6 +1620,45 @@ def build_coastal_ncdf_dataframe( # linearly extrapolate depth value at start date coastal_boundary_depth_df.iloc[:,0] = 2.0*coastal_boundary_depth_df.iloc[:,1] - coastal_boundary_depth_df.iloc[:,2] + return coastal_boundary_depth_df + +def build_coastal_dataframe( + run, + coastal_boundary_domain, + ): + + downstream_boundary_input_folder = run.get("downstream_boundary_input_folder",None) + + if downstream_boundary_input_folder: + downstream_boundary_input_folder = pathlib.Path(downstream_boundary_input_folder) + if "downstream_boundary_files" in run: + downstream_boundary_files = run.get("downstream_boundary_files") + downstream_boundary_files = [downstream_boundary_input_folder.joinpath(f) for f in downstream_boundary_files] + + timeslice_schism_list=[] + for f in downstream_boundary_files: + ds = xr.open_dataset(f) + df = ds.to_dataframe() + tws = [] + timestamps = [] + depths= [] + for tw, boundary_node in coastal_boundary_domain.items(): + tws.append(tw) + df2 = df[df['schism_id']==boundary_node] + date = df2.time.iloc[0] + timestamps.append(pd.to_datetime(date).strftime('%Y-%m-%d %H:%M:%S')) + depths.append(df2.elev.iat[0] + df2.depth.iat[0]) + timeslice_schism = (pd.DataFrame({ + 'stationId' : tws, + 'datetime' : timestamps, + 'depth' : depths + }). + set_index(['stationId', 'datetime']). 
+
+def build_coastal_dataframe(
+    run,
+    coastal_boundary_domain,
+    ):
+
+    downstream_boundary_input_folder = run.get("downstream_boundary_input_folder", None)
+
+    if downstream_boundary_input_folder:
+        downstream_boundary_input_folder = pathlib.Path(downstream_boundary_input_folder)
+        if "downstream_boundary_files" in run:
+            downstream_boundary_files = run.get("downstream_boundary_files")
+            downstream_boundary_files = [downstream_boundary_input_folder.joinpath(f) for f in downstream_boundary_files]
+
+    timeslice_schism_list = []
+    for f in downstream_boundary_files:
+        ds = xr.open_dataset(f)
+        df = ds.to_dataframe()
+        tws = []
+        timestamps = []
+        depths = []
+        for tw, boundary_node in coastal_boundary_domain.items():
+            tws.append(tw)
+            df2 = df[df['schism_id']==boundary_node]
+            date = df2.time.iloc[0]
+            timestamps.append(pd.to_datetime(date).strftime('%Y-%m-%d %H:%M:%S'))
+            depths.append(df2.elev.iat[0] + df2.depth.iat[0])
+        timeslice_schism = (pd.DataFrame({
+                                'stationId' : tws,
+                                'datetime'  : timestamps,
+                                'depth'     : depths
+                            }).
+                            set_index(['stationId', 'datetime']).
+                            unstack(1, fill_value = np.nan)['depth'])
+        timeslice_schism_list.append(timeslice_schism)
+
+    coastal_boundary_depth_df = pd.concat(timeslice_schism_list, axis=1, ignore_index=False)
+
+    return coastal_boundary_depth_df
 
 
 def lastobs_df_output(
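build_coastal_dataframe reduces each SCHISM file to one water-surface value per mapped boundary node (elev + depth) and pivots the records into a stationId × datetime table. A toy illustration of that pivot with made-up depths and the single mapping from coastal_boundary_domain.yaml (tailwater 10237 → SCHISM node 252935); the numbers themselves are invented:

```python
import numpy as np
import pandas as pd

coastal_boundary_domain = {10237: 252935}  # tailwater id -> SCHISM node, from the domain yaml

# one (stationId, datetime, depth) record per boundary node per file (made-up values)
records = [
    (10237, '2015-12-01 01:00:00', 0.85),  # would come from 201512010100SCHISM.nc
    (10237, '2015-12-01 02:00:00', 0.91),  # would come from 201512010200SCHISM.nc
]

frames = []
for station, stamp, depth in records:
    # same set_index / unstack pattern used in build_coastal_dataframe
    frame = (pd.DataFrame({'stationId': [station], 'datetime': [stamp], 'depth': [depth]})
               .set_index(['stationId', 'datetime'])
               .unstack(1, fill_value=np.nan)['depth'])
    frames.append(frame)

coastal_boundary_depth_df = pd.concat(frames, axis=1)
print(coastal_boundary_depth_df)
# one row (stationId 10237) with one depth column per timestamp
```

The result is rows indexed by tailwater station and columns indexed by timestamp, which is the layout the later boundary-interpolation step consumes.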
diff --git a/src/troute-network/troute/nhd_network_utilities_v02.py b/src/troute-network/troute/nhd_network_utilities_v02.py
index e2d353e5d..31eafeaaf 100644
--- a/src/troute-network/troute/nhd_network_utilities_v02.py
+++ b/src/troute-network/troute/nhd_network_utilities_v02.py
@@ -8,6 +8,7 @@ import numpy as np
 import netCDF4
 from joblib import delayed, Parallel
+import xarray as xr
 
 import troute.nhd_io as nhd_io
 import troute.nhd_network as nhd_network
@@ -253,6 +254,7 @@ def build_forcing_sets(
 
     run_sets          = forcing_parameters.get("qlat_forcing_sets", None)
     qlat_input_folder = forcing_parameters.get("qlat_input_folder", None)
+    downstream_boundary_input_folder = forcing_parameters.get("downstream_boundary_input_folder", None)
     nts               = forcing_parameters.get("nts", None)
     max_loop_size     = forcing_parameters.get("max_loop_size", 12)
     dt                = forcing_parameters.get("dt", None)
@@ -266,6 +268,7 @@ def build_forcing_sets(
         raise AssertionError("Aborting simulation because the qlat_input_folder:", qlat_input_folder,"does not exist. Please check the the qlat_input_folder variable is correctly entered in the .yaml control file") from None
 
     forcing_glob_filter = forcing_parameters.get("qlat_file_pattern_filter", "*.CHRTOUT_DOMAIN1")
+    downstream_boundary_glob_filter = forcing_parameters.get("downstream_boundary_file_pattern_filter", "*SCHISM.nc")
 
     # TODO: Throw errors if insufficient input data are available
     if run_sets:
@@ -357,6 +360,64 @@ def build_forcing_sets(
             nts_last = nts_accum
             k += max_loop_size
             j += 1
+
+    # create downstream boundary forcing file list
+    if downstream_boundary_input_folder:
+        # get the first and second files from an ordered list of all forcing files
+        downstream_boundary_input_folder = pathlib.Path(downstream_boundary_input_folder)
+        all_files = sorted(downstream_boundary_input_folder.glob(downstream_boundary_glob_filter))
+        first_file = all_files[0]
+        second_file = all_files[1]
+
+        # deduce the time interval of the forcing data from the output timestamps of the first two files
+        df = read_file(first_file)
+        t1_str = pd.to_datetime(df.time.iloc[0]).strftime("%Y-%m-%d_%H:%M:%S")
+        t1 = datetime.strptime(t1_str, "%Y-%m-%d_%H:%M:%S")
+        df = read_file(second_file)
+        t2_str = pd.to_datetime(df.time.iloc[0]).strftime("%Y-%m-%d_%H:%M:%S")
+        t2 = datetime.strptime(t2_str, "%Y-%m-%d_%H:%M:%S")
+        dt_downstream_boundary_timedelta = t2 - t1
+        dt_downstream_boundary = dt_downstream_boundary_timedelta.seconds
+
+        bts_subdivisions = dt_downstream_boundary / dt
+        # the number of files required for the simulation
+        nfiles = int(np.ceil(nts / bts_subdivisions)) + 1
+
+        # list of downstream boundary file datetimes
+        # n+1 because the downstream boundary values for, e.g., the next 2 hr of simulation are the
+        # values at t0, t0+1hr, and t0+2hr, and SCHISM currently does not store the value at t0
+        # (that value is interpolated separately)
+        datetime_list = [t0 + dt_downstream_boundary_timedelta * (n+1) for n in range(nfiles)]
+        datetime_list_str = [datetime.strftime(d, '%Y%m%d%H%M') for d in datetime_list]
+
+        # list of downstream boundary files
+        downstream_boundary_filename_list = [d_str + downstream_boundary_glob_filter[1:] for d_str in datetime_list_str]
+
+        # check that all forcing files exist
+        for f in downstream_boundary_filename_list:
+            try:
+                J = pathlib.Path(downstream_boundary_input_folder.joinpath(f))
+                assert J.is_file() == True
+            except AssertionError:
+                raise AssertionError("Aborting simulation because downstream boundary file", J, "cannot be found.") from None
+
+        # append the downstream boundary files to the run sets built above
+        k = 0
+        j = 0
+        nts_accum = 0
+        nts_last = 0
+        while k < len(downstream_boundary_filename_list)-1:
+            if k + max_loop_size < len(downstream_boundary_filename_list):
+                run_sets[j]['downstream_boundary_files'] = downstream_boundary_filename_list[k:k + max_loop_size+1]
+            else:
+                run_sets[j]['downstream_boundary_files'] = downstream_boundary_filename_list[k:]
+            k += max_loop_size
+            j += 1
+
     return run_sets
 
 def build_qlateral_array(
@@ -872,3 +933,14 @@ def build_refac_connections(diff_network_parameters):
     )
 
     return connections
+
+def read_file(file_name):
+    extension = file_name.suffix
+    if extension=='.csv':
+        df = pd.read_csv(file_name)
+    elif extension=='.parquet':
+        df = pq.read_table(file_name).to_pandas().reset_index()
+        df.index.name = None
+    elif extension=='.nc':
+        df = xr.open_dataset(file_name)
+        df = df.to_dataframe()
+    return df
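For reference, the expected file names are simply the timestamp plus the suffix of the pattern filter, so the n+1 offset above determines exactly which SCHISM files must exist on disk. A small sketch, assuming t0 = 2015-12-01 00:00, hourly boundary files, and the default "*SCHISM.nc" filter (nfiles is truncated to 3 here just to keep the output short); note the hyfeature variant earlier builds its list with n rather than n+1, so its first expected file is stamped at t0 itself:

```python
from datetime import datetime, timedelta

t0 = datetime(2015, 12, 1, 0, 0)        # simulation start
dt_boundary = timedelta(hours=1)        # interval deduced from the first two files
downstream_boundary_glob_filter = "*SCHISM.nc"
nfiles = 3

# offset by one interval: the value at t0 itself is not written by SCHISM
datetime_list = [t0 + dt_boundary * (n + 1) for n in range(nfiles)]
filenames = [d.strftime('%Y%m%d%H%M') + downstream_boundary_glob_filter[1:]
             for d in datetime_list]
print(filenames)
# ['201512010100SCHISM.nc', '201512010200SCHISM.nc', '201512010300SCHISM.nc']
```

These names match the test fixtures added under test/unit_test_hyfeature/channel_forcing/ later in this patch.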
- run["t0"] = run.get("t0", t0) - run["nts"] = run.get("nts") - run["dt"] = run.get("dt", dt) - run["qts_subdivisions"] = run.get("qts_subdivisions", qts_subdivisions) - run["qlat_input_folder"] = run.get("qlat_input_folder", qlat_input_folder) - run["qlat_file_index_col"] = run.get("qlat_file_index_col", qlat_file_index_col) - run["qlat_file_value_col"] = run.get("qlat_file_value_col", qlat_file_value_col) - run["qlat_file_gw_bucket_flux_col"] = run.get("qlat_file_gw_bucket_flux_col", qlat_file_gw_bucket_flux_col) - run["qlat_file_terrain_runoff_col"] = run.get("qlat_file_terrain_runoff_col", qlat_file_terrain_runoff_col) - + run["t0"] = run.get("t0", t0) + run["nts"] = run.get("nts") + run["dt"] = run.get("dt", dt) + run["qts_subdivisions"] = run.get("qts_subdivisions", qts_subdivisions) + run["qlat_input_folder"] = run.get("qlat_input_folder", qlat_input_folder) + run["qlat_file_index_col"] = run.get("qlat_file_index_col", qlat_file_index_col) + run["qlat_file_value_col"] = run.get("qlat_file_value_col", qlat_file_value_col) + run["qlat_file_gw_bucket_flux_col"] = run.get("qlat_file_gw_bucket_flux_col", qlat_file_gw_bucket_flux_col) + run["qlat_file_terrain_runoff_col"] = run.get("qlat_file_terrain_runoff_col", qlat_file_terrain_runoff_col) + run["downstream_boundary_input_folder"] = run.get("downstream_boundary_input_folder", downstream_boundary_input_folder) + #--------------------------------------------------------------------------- # Assemble lateral inflow data #--------------------------------------------------------------------------- @@ -894,7 +895,8 @@ def nhd_forcing( # Assemble coastal coupling data [WIP] #--------------------------------------------------------------------- # Run if coastal_boundary_depth_df has not already been created: - if coastal_boundary_depth_df.empty: + #if coastal_boundary_depth_df.empty: + if 1==1: # when hourly schism data is used. coastal_boundary_elev_files = forcing_parameters.get('coastal_boundary_input_file', None) coastal_boundary_domain_files = hybrid_parameters.get('coastal_boundary_domain', None) @@ -903,11 +905,20 @@ def nhd_forcing( LOG.info("creating coastal dataframe ...") coastal_boundary_domain = nhd_io.read_coastal_boundary_domain(coastal_boundary_domain_files) + + # create dataframe for hourly schism data + coastal_boundary_depth_df = nhd_io.build_coastal_dataframe( + run, + coastal_boundary_domain, + ) + + # create dataframe for multi hourly schism data + ''' coastal_boundary_depth_df = nhd_io.build_coastal_ncdf_dataframe( coastal_boundary_elev_files, coastal_boundary_domain, ) - + ''' LOG.debug( "coastal boundary elevation observation DataFrame creation complete in %s seconds." 
\ % (time.time() - start_time) diff --git a/src/troute-routing/troute/routing/diffusive_utils.py b/src/troute-routing/troute/routing/diffusive_utils.py index 053124b1b..9556cec08 100644 --- a/src/troute-routing/troute/routing/diffusive_utils.py +++ b/src/troute-routing/troute/routing/diffusive_utils.py @@ -1000,7 +1000,7 @@ def fp_coastal_boundary_input_map( dt_db_g = (date_time_obj1 - date_time_obj0).total_seconds() nts_db_g = int((tfin_g - t0_g) * 3600.0 / dt_db_g) + 1 # include initial time 0 to the final time dbcd_g = np.ones(nts_db_g) - + if not coastal_boundary_depth_df.empty: dt_timeslice = timedelta(minutes=dt_db_g/60.0) tfin = t0 + dt_timeslice*(nts_db_g-1) diff --git a/test/unit_test_hyfeature/channel_forcing/201512010000SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010000SCHISM.nc new file mode 100644 index 000000000..5cca7c92f Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010000SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010100SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010100SCHISM.nc new file mode 100644 index 000000000..5e5fb6ca0 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010100SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010200SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010200SCHISM.nc new file mode 100644 index 000000000..816f4e51a Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010200SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010300SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010300SCHISM.nc new file mode 100644 index 000000000..441fea007 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010300SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010400SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010400SCHISM.nc new file mode 100644 index 000000000..ccf0bfe45 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010400SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010500SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010500SCHISM.nc new file mode 100644 index 000000000..33a2d6703 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010500SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010600SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010600SCHISM.nc new file mode 100644 index 000000000..5b73273ce Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010600SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010700SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010700SCHISM.nc new file mode 100644 index 000000000..5d6fb8374 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010700SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010800SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010800SCHISM.nc new file mode 100644 index 000000000..d1596153a Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010800SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512010900SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512010900SCHISM.nc new file mode 100644 index 000000000..d41392db9 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512010900SCHISM.nc differ 
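The *SCHISM.nc fixtures added to test/unit_test_hyfeature/channel_forcing/ are one-timestamp snapshots, and build_coastal_dataframe only touches four fields in them. A synthetic stand-in for one file, useful for checking the selection logic without the binary fixtures; the variable names time, schism_id, elev, and depth come from the code above, while the dimension layout and values are assumptions for illustration only:

```python
import numpy as np
import pandas as pd
import xarray as xr

# synthetic single-timestamp SCHISM output (schema assumed, not shown in this diff)
ds = xr.Dataset(
    {
        "schism_id": ("node", np.array([252935, 252936])),
        "elev":      ("node", np.array([0.42, 0.38])),
        "depth":     ("node", np.array([0.50, 0.45])),
        "time":      ("node", np.repeat(np.datetime64("2015-12-01T01:00"), 2)),
    }
)
df = ds.to_dataframe()

# same per-node selection build_coastal_dataframe performs for one boundary node
df2 = df[df["schism_id"] == 252935]
stamp = pd.to_datetime(df2.time.iloc[0]).strftime("%Y-%m-%d %H:%M:%S")
water_depth = df2.elev.iat[0] + df2.depth.iat[0]
print(stamp, water_depth)  # 2015-12-01 01:00:00 0.92
```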
diff --git a/test/unit_test_hyfeature/channel_forcing/201512011000SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011000SCHISM.nc new file mode 100644 index 000000000..0ba3f7143 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011000SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011100SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011100SCHISM.nc new file mode 100644 index 000000000..ca57f9101 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011100SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011200SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011200SCHISM.nc new file mode 100644 index 000000000..743d8438f Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011200SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011300SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011300SCHISM.nc new file mode 100644 index 000000000..442cd12d0 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011300SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011400SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011400SCHISM.nc new file mode 100644 index 000000000..ead97240b Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011400SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011500SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011500SCHISM.nc new file mode 100644 index 000000000..7ba2386ad Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011500SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011600SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011600SCHISM.nc new file mode 100644 index 000000000..8812012b9 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011600SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011700SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011700SCHISM.nc new file mode 100644 index 000000000..af0a02c97 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011700SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011800SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011800SCHISM.nc new file mode 100644 index 000000000..5609d12d0 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011800SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512011900SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512011900SCHISM.nc new file mode 100644 index 000000000..33ccf3d67 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512011900SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512012000SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512012000SCHISM.nc new file mode 100644 index 000000000..9722cb096 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512012000SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512012100SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512012100SCHISM.nc new file mode 100644 index 000000000..6e338876a Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512012100SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512012200SCHISM.nc 
b/test/unit_test_hyfeature/channel_forcing/201512012200SCHISM.nc new file mode 100644 index 000000000..5228d0733 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512012200SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512012300SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512012300SCHISM.nc new file mode 100644 index 000000000..cbdeb250d Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512012300SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020000SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020000SCHISM.nc new file mode 100644 index 000000000..e8a04c311 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020000SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020100SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020100SCHISM.nc new file mode 100644 index 000000000..de925aef6 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020100SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020200SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020200SCHISM.nc new file mode 100644 index 000000000..6406e56a4 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020200SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020300SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020300SCHISM.nc new file mode 100644 index 000000000..8cf50d3a1 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020300SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020400SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020400SCHISM.nc new file mode 100644 index 000000000..23611877c Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020400SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020500SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020500SCHISM.nc new file mode 100644 index 000000000..77ab0b436 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020500SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020600SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020600SCHISM.nc new file mode 100644 index 000000000..ae31e2ddd Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020600SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020700SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020700SCHISM.nc new file mode 100644 index 000000000..fa10739c1 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020700SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020800SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020800SCHISM.nc new file mode 100644 index 000000000..3cd168031 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020800SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512020900SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512020900SCHISM.nc new file mode 100644 index 000000000..24e5341da Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512020900SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021000SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021000SCHISM.nc new file mode 100644 index 
000000000..203df94f3 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021000SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021100SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021100SCHISM.nc new file mode 100644 index 000000000..3b07f7aae Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021100SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021200SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021200SCHISM.nc new file mode 100644 index 000000000..1cd6c0927 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021200SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021300SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021300SCHISM.nc new file mode 100644 index 000000000..99b1078b6 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021300SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021400SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021400SCHISM.nc new file mode 100644 index 000000000..7fe40bf00 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021400SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021500SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021500SCHISM.nc new file mode 100644 index 000000000..19d00fc01 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021500SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021600SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021600SCHISM.nc new file mode 100644 index 000000000..21aa7bd21 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021600SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021700SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021700SCHISM.nc new file mode 100644 index 000000000..a1a9689db Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021700SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021800SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021800SCHISM.nc new file mode 100644 index 000000000..95ad6612a Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021800SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512021900SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512021900SCHISM.nc new file mode 100644 index 000000000..47e850bd6 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512021900SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512022000SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512022000SCHISM.nc new file mode 100644 index 000000000..89499a16f Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512022000SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512022100SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512022100SCHISM.nc new file mode 100644 index 000000000..829e37a65 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512022100SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512022200SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512022200SCHISM.nc new file mode 100644 index 000000000..e020547f5 Binary files /dev/null and 
b/test/unit_test_hyfeature/channel_forcing/201512022200SCHISM.nc differ diff --git a/test/unit_test_hyfeature/channel_forcing/201512022300SCHISM.nc b/test/unit_test_hyfeature/channel_forcing/201512022300SCHISM.nc new file mode 100644 index 000000000..44f29c9f2 Binary files /dev/null and b/test/unit_test_hyfeature/channel_forcing/201512022300SCHISM.nc differ diff --git a/test/unit_test_hyfeature/domain/coastal_boundary_domain.yaml b/test/unit_test_hyfeature/domain/coastal_boundary_domain.yaml index 3374cc333..5f7a28e60 100644 --- a/test/unit_test_hyfeature/domain/coastal_boundary_domain.yaml +++ b/test/unit_test_hyfeature/domain/coastal_boundary_domain.yaml @@ -1,4 +1,4 @@ -10237: 0 # Lower Colorado River +10237: 252935 # Lower Colorado River #5781901: 252935 # Lower Colorado River diff --git a/test/unit_test_hyfeature/unittest_hyfeature.yaml b/test/unit_test_hyfeature/unittest_hyfeature.yaml index a04032d8c..e16b2085f 100644 --- a/test/unit_test_hyfeature/unittest_hyfeature.yaml +++ b/test/unit_test_hyfeature/unittest_hyfeature.yaml @@ -64,7 +64,9 @@ compute_parameters: nexus_input_folder : channel_forcing/ nexus_file_pattern_filter : "*NEXOUT.csv" #OR "*NEXOUT.parquet" OR "nex-*" binary_nexus_file_folder : binary_files # this is required if nexus_file_pattern_filter="nex-*" - coastal_boundary_input_file : channel_forcing/schout_1.nc + coastal_boundary_input_file : channel_forcing/schout_1.nc + downstream_boundary_input_folder : channel_forcing/ + downstream_boundary_file_pattern_filter : "*SCHISM.nc" nts : 48 #288 for 1day max_loop_size : 2 # [hr] data_assimilation_parameters:
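Putting the configuration together: the two keys added to unittest_hyfeature.yaml are read straight out of forcing_parameters by the functions in this patch. An equivalent in-code view of the relevant block, as a hypothetical sketch (dt is invented here because it is not part of the yaml excerpt; the folder and filter values are the ones from the test config):

```python
# mirror of the compute_parameters -> forcing_parameters block used by the unit test
forcing_parameters = {
    "nts": 48,                                                   # 288 for 1 day
    "max_loop_size": 2,                                          # [hr]
    "dt": 300,                                                   # hypothetical routing step [s]
    "nexus_input_folder": "channel_forcing/",
    "nexus_file_pattern_filter": "*NEXOUT.csv",
    "coastal_boundary_input_file": "channel_forcing/schout_1.nc",
    "downstream_boundary_input_folder": "channel_forcing/",
    "downstream_boundary_file_pattern_filter": "*SCHISM.nc",
}

# build_forcing_sets() uses downstream_boundary_input_folder and
# downstream_boundary_file_pattern_filter to attach 'downstream_boundary_files'
# to each run set; hyfeature_forcing()/nhd_forcing() then copy the folder into
# each run dict so nhd_io.build_coastal_dataframe(run, coastal_boundary_domain)
# can resolve and open the listed files.
```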