diff --git a/hydromt_delft3dfm/dflowfm.py b/hydromt_delft3dfm/dflowfm.py index 1c9285e..c06764e 100644 --- a/hydromt_delft3dfm/dflowfm.py +++ b/hydromt_delft3dfm/dflowfm.py @@ -1,6 +1,7 @@ """Implement Delft3D-FM hydromt plugin model class.""" import itertools +import logging import os from datetime import datetime, timedelta from os.path import basename, dirname, isfile, join @@ -22,7 +23,7 @@ from . import DATADIR, gis_utils, mesh_utils, utils, workflows __all__ = ["DFlowFMModel"] -# logger = logging.getLogger(__name__) +logger = logging.getLogger("hydromt") class DFlowFMModel(Model): @@ -121,7 +122,6 @@ def __init__( network_snap_offset=25, snap_newbranches_to_branches_at_snapnodes=True, openwater_computation_node_distance=40, - # logger=logger, ): """Initialize the DFlowFMModel. @@ -155,8 +155,6 @@ def __init__( openwater_computation_node_distance: float, optional Global option for generation of the mesh1d network. Distance to generate mesh1d nodes for open water system (rivers, channels). By default 40 m. - logger - The logger used to log messages. 
""" if not isinstance(root, (str, Path)): raise ValueError("The 'root' parameter should be a of str or Path.") @@ -171,7 +169,6 @@ def __init__( mode=mode, # config_fn=config_fn, data_libs=data_libs, - # logger=logger, region_component="mesh", ) @@ -293,7 +290,7 @@ def setup_channels( -------- dflowfm._setup_branches """ - self.logger.info("Preparing 1D channels.") + logger.info("Preparing 1D channels.") # filter for allowed columns br_type = "channel" @@ -333,7 +330,6 @@ def setup_channels( snap_offset=snap_offset, allow_intersection_snapping=allow_intersection_snapping, allowed_columns=_allowed_columns, - logger=self.logger, ) # Prepare friction and crosssections channels = workflows.prepare_default_friction_and_crosssection( @@ -341,7 +337,6 @@ def setup_channels( br_type=br_type, friction_type=friction_type, friction_value=friction_value, - logger=self.logger, ) # setup crosssections @@ -358,14 +353,14 @@ def setup_channels( ) # add crosssections to exisiting ones and update geoms - self.logger.debug("Adding crosssections vector to geoms.") + logger.debug("Adding crosssections vector to geoms.") crosssections = workflows.add_crosssections( self.geoms.get("crosssections"), crosssections ) self.set_geoms(crosssections, "crosssections") # setup geoms - self.logger.debug("Adding branches and branch_nodes vector to geoms.") + logger.debug("Adding branches and branch_nodes vector to geoms.") self.set_geoms(channels, "channels") self.set_geoms(channel_nodes, "channel_nodes") @@ -517,7 +512,7 @@ def setup_rivers_from_dem( ValueError """ - self.logger.info("Preparing river shape from hydrography data.") + logger.info("Preparing river shape from hydrography data.") # parse region argument region = workflows.parse_region_geometry(region, self.crs) @@ -577,7 +572,6 @@ def setup_rivers_from_dem( smooth_length=smooth_length, constrain_estuary=constrain_estuary, constrain_rivbed=constrain_rivbed, - logger=self.logger, **kwargs, ) # Rename river properties column and reproject 
@@ -617,7 +611,6 @@ def setup_rivers_from_dem( dst_crs=self.crs, id_start=len(self.branches) + 1, allowed_columns=_allowed_columns, - logger=self.logger, ) # Prepare friction branches = workflows.prepare_default_friction_and_crosssection( @@ -625,7 +618,6 @@ def setup_rivers_from_dem( br_type=br_type, friction_type=friction_type, friction_value=friction_value, - logger=self.logger, ) # setup crosssections @@ -636,14 +628,14 @@ def setup_rivers_from_dem( ) # add crosssections to exisiting ones and update geoms - self.logger.debug("Adding crosssections vector to geoms.") + logger.debug("Adding crosssections vector to geoms.") crosssections = workflows.add_crosssections( self.geoms.get("crosssections"), crosssections ) self.set_geoms(crosssections, "crosssections") # setup geoms #TODO do we still need channels? - self.logger.debug("Adding rivers and river_nodes vector to geoms.") + logger.debug("Adding rivers and river_nodes vector to geoms.") self.set_geoms(rivers, "rivers") self.set_geoms(river_nodes, "rivers_nodes") @@ -769,7 +761,7 @@ def setup_rivers( dflowfm._setup_branches dflowfm._setup_crosssections """ - self.logger.info("Preparing 1D rivers.") + logger.info("Preparing 1D rivers.") # filter for allowed columns br_type = "river" _allowed_columns = [ @@ -806,7 +798,6 @@ def setup_rivers( snap_offset=snap_offset, allow_intersection_snapping=allow_intersection_snapping, allowed_columns=_allowed_columns, - logger=self.logger, ) # Prepare friction and crosssections rivers = workflows.prepare_default_friction_and_crosssection( @@ -814,7 +805,6 @@ def setup_rivers( br_type=br_type, friction_type=friction_type, friction_value=friction_value, - logger=self.logger, ) # setup crosssections @@ -848,7 +838,7 @@ def setup_rivers( ] = -1 # setup geoms for rivers and river_nodes - self.logger.debug("Adding rivers and river_nodes vector to geoms.") + logger.debug("Adding rivers and river_nodes vector to geoms.") self.set_geoms(rivers, "rivers") self.set_geoms(river_nodes, 
"rivers_nodes") @@ -996,7 +986,7 @@ def setup_pipes( dflowfm._setup_branches dflowfm._setup_crosssections """ - self.logger.info("Preparing 1D pipes.") + logger.info("Preparing 1D pipes.") # filter for allowed columns br_type = "pipe" @@ -1036,7 +1026,6 @@ def setup_pipes( snap_offset=snap_offset, allow_intersection_snapping=allow_intersection_snapping, allowed_columns=_allowed_columns, - logger=self.logger, ) # Prepare friction and crosssections pipes = workflows.prepare_default_friction_and_crosssection( @@ -1046,7 +1035,6 @@ def setup_pipes( friction_value=friction_value, crosssections_shape=crosssections_shape, crosssections_value=crosssections_value, - logger=self.logger, ) # filter extra time for geting clipped pipes within the region (better match) # remove the index name to avoid "ValueError: cannot insert branchid, @@ -1061,7 +1049,7 @@ def setup_pipes( inv = pipes[["invlev_up", "invlev_dn"]] if inv.isnull().sum().sum() > 0: # nodata values in pipes for invert levels fill_invlev = True - self.logger.info( + logger.info( f"{pipes_fn} data has {inv.isnull().sum().sum()} no data values" "for invert levels. Will be filled using dem_fn or" f"default value {pipes_invlev}" @@ -1070,7 +1058,7 @@ def setup_pipes( fill_invlev = False else: fill_invlev = True - self.logger.info( + logger.info( f"{pipes_fn} does not have columns [invlev_up, invlev_dn]." "Invert levels will be generated from dem_fn or" f"default value {pipes_invlev}" @@ -1092,7 +1080,7 @@ def setup_pipes( fill_invlev = False # 3. filling use pipes_invlev if fill_invlev and pipes_invlev is not None: - self.logger.warning( + logger.warning( "!Using a constant up and down invert levels for all pipes." "May cause issues when running the delft3dfm model.!" 
) @@ -1117,14 +1105,14 @@ def setup_pipes( midpoint=False, ) # add crosssections to exisiting ones and update geoms - self.logger.debug("Adding crosssections vector to geoms.") + logger.debug("Adding crosssections vector to geoms.") crosssections = workflows.add_crosssections( self.geoms.get("crosssections"), crosssections ) self.set_geoms(crosssections, "crosssections") # setup geoms - self.logger.debug("Adding pipes and pipe_nodes vector to geoms.") + logger.debug("Adding pipes and pipe_nodes vector to geoms.") self.set_geoms(pipes, "pipes") self.set_geoms(pipe_nodes, "pipe_nodes") # TODO: for manholes @@ -1236,7 +1224,7 @@ def _setup_crosssections( # might require upstream/downstream # TODO: check for required columns # read crosssection from branches - self.logger.info("Preparing crossections from branch.") + logger.info("Preparing crossections from branch.") gdf_cs = workflows.set_branch_crosssections(branches, midpoint=midpoint) elif crosssections_type == "xyz": @@ -1250,7 +1238,7 @@ def _setup_crosssections( # check if feature valid if len(gdf_cs) == 0: - self.logger.warning( + logger.warning( f"No {crosssections_fn} 1D xyz crosssections found within domain" ) return None @@ -1258,7 +1246,7 @@ def _setup_crosssections( gdf_cs, required_columns=["crsid", "order", "z"] ) if not valid_attributes: - self.logger.error( + logger.error( "Required attributes [crsid, order, z] in xyz crosssections" "do not exist" ) @@ -1273,7 +1261,7 @@ def _setup_crosssections( gdf_cs.to_crs(self.crs) # set crsloc and crsdef attributes to crosssections - self.logger.info(f"Preparing 1D xyz crossections from {crosssections_fn}") + logger.info(f"Preparing 1D xyz crossections from {crosssections_fn}") gdf_cs = workflows.set_xyz_crosssections(branches, gdf_cs) elif crosssections_type == "point": @@ -1287,7 +1275,7 @@ def _setup_crosssections( # check if feature valid if len(gdf_cs) == 0: - self.logger.warning( + logger.warning( f"No {crosssections_fn} 1D point crosssections found within 
domain" ) return None @@ -1295,7 +1283,7 @@ def _setup_crosssections( gdf_cs, required_columns=["crsid", "shape", "shift"] ) if not valid_attributes: - self.logger.error( + logger.error( "Required attributes [crsid, shape, shift] in point crosssections" "do not exist" ) @@ -1310,7 +1298,7 @@ def _setup_crosssections( gdf_cs.to_crs(self.crs) # set crsloc and crsdef attributes to crosssections - self.logger.info(f"Preparing 1D point crossections from {crosssections_fn}") + logger.info(f"Preparing 1D point crossections from {crosssections_fn}") gdf_cs = workflows.set_point_crosssections( branches, gdf_cs, maxdist=maxdist ) @@ -1405,14 +1393,13 @@ def setup_manholes( ] # generate manhole locations and bedlevels - self.logger.info("generating manholes locations and bedlevels. ") + logger.info("generating manholes locations and bedlevels. ") manholes, branches = workflows.generate_manholes_on_branches( self.branches, bedlevel_shift=bedlevel_shift, use_branch_variables=["diameter", "width"], id_prefix="manhole_", id_suffix="_generated", - logger=self.logger, ) # FIXME Xiaohan: why do we need set_branches here? Because of branches.gui # --> add a high level write_gui files same level as write_mesh @@ -1426,7 +1413,7 @@ def setup_manholes( # read user manhole if manholes_fn: - self.logger.info(f"reading manholes street level from file {manholes_fn}. ") + logger.info(f"reading manholes street level from file {manholes_fn}. 
") # read gdf_manhole = self.data_catalog.get_geodataframe( manholes_fn, @@ -1439,21 +1426,19 @@ def setup_manholes( gdf_manhole = gdf_manhole.to_crs(self.crs) # filter for allowed columns allowed_columns = set(_allowed_columns).intersection(gdf_manhole.columns) - self.logger.debug( - f'filtering for allowed columns:{",".join(allowed_columns)}' - ) + logger.debug(f'filtering for allowed columns:{",".join(allowed_columns)}') gdf_manhole = gpd.GeoDataFrame( gdf_manhole[list(allowed_columns)], crs=gdf_manhole.crs ) # replace generated manhole using user manholes - self.logger.debug("overwriting generated manholes using user manholes.") + logger.debug("overwriting generated manholes using user manholes.") manholes = hydromt.gis_utils.nearest_merge( manholes, gdf_manhole, max_dist=snap_offset, overwrite=True ) # generate manhole streetlevels from dem if dem_fn is not None: - self.logger.info("overwriting manholes street level from dem. ") + logger.info("overwriting manholes street level from dem. ") dem = self.data_catalog.get_rasterdataset( dem_fn, geom=self.region, @@ -1464,13 +1449,11 @@ def setup_manholes( manholes["_streetlevel_dem"] = dem.raster.sample(manholes).values manholes["_streetlevel_dem"].fillna(manholes["streetlevel"], inplace=True) manholes["streetlevel"] = manholes["_streetlevel_dem"] - self.logger.debug( - f'street level mean is {np.mean(manholes["streetlevel"])}' - ) + logger.debug(f'street level mean is {np.mean(manholes["streetlevel"])}') # internal administration # drop duplicated manholeid - self.logger.debug("dropping duplicated manholeid") + logger.debug("dropping duplicated manholeid") manholes.drop_duplicates(subset="manholeid") # add nodeid to manholes network1d_nodes = mesh_utils.network1d_nodes_geodataframe( @@ -1488,13 +1471,13 @@ def setup_manholes( # validate if manholes[_allowed_columns].isna().any().any(): - self.logger.error( + logger.error( "manholes contain no data." "Use manholes_defaults_fn to apply no data filling." 
) # setup geoms - self.logger.debug("Adding manholes vector to geoms.") + logger.debug("Adding manholes vector to geoms.") self.set_geoms(manholes, "manholes") def setup_1dboundary( @@ -1581,7 +1564,7 @@ def setup_1dboundary( network nodes. By default 0.1, a small snapping is applied to avoid precision errors. """ - self.logger.info(f"Preparing 1D {boundary_type} boundaries for {branch_type}.") + logger.info(f"Preparing 1D {boundary_type} boundaries for {branch_type}.") # 1. get potential boundary locations based on branch_type and boundary_type boundaries_branch_type = workflows.select_boundary_type( @@ -1602,7 +1585,6 @@ def setup_1dboundary( boundary_type=boundary_type, boundary_unit=boundary_unit, snap_offset=snap_offset, - logger=self.logger, ) # 4. set boundaries @@ -1636,7 +1618,7 @@ def _read_forcing_geodataset( ): pass else: - self.logger.error( + logger.error( "Forcing has different start and end time." + " Please check the forcing file. Support yyyy-mm-dd HH:MM:SS. " ) @@ -1710,7 +1692,7 @@ def setup_1dlateral_from_points( If None, all branches are used. By defalt None. """ - self.logger.info(f"Preparing 1D laterals for {branch_type}.") + logger.info(f"Preparing 1D laterals for {branch_type}.") network_by_branchtype = self.staticgeoms[f"{branch_type}s"] # 1. read lateral geodataset and snap to network @@ -1735,7 +1717,6 @@ def setup_1dlateral_from_points( forcing_value=lateral_value, forcing_type="lateral_discharge", forcing_unit="m3/s", - logger=self.logger, ) # 3. set laterals @@ -1776,7 +1757,7 @@ def setup_1dlateral_from_polygons( or for filling in missing data. By default 0 [m3/s]. """ - self.logger.info("Preparing 1D laterals for polygons.") + logger.info("Preparing 1D laterals for polygons.") # 1. read lateral geodataset gdf_laterals, da_lat = self._read_forcing_geodataset( @@ -1793,7 +1774,6 @@ def setup_1dlateral_from_polygons( forcing_value=lateral_value, forcing_type="lateral_discharge", forcing_unit="m3/s", - logger=self.logger, ) # 3. 
set laterals @@ -2176,13 +2156,13 @@ def setup_mesh2d_refine( """ if "mesh2d" not in self.mesh_names: - self.logger.error( + logger.error( "2d mesh is not available, use setup_mesh2d before refinement." ) return if polygon_fn is not None: - self.logger.info(f"reading geometry from file {polygon_fn}. ") + logger.info(f"reading geometry from file {polygon_fn}. ") # read gdf = self.data_catalog.get_geodataframe( polygon_fn, geom=self.region, buffer=0, predicate="contains" @@ -2192,7 +2172,7 @@ def setup_mesh2d_refine( gdf = gdf.to_crs(self.crs) elif sample_fn is not None: - self.logger.info(f"reading samples from file {sample_fn}. ") + logger.info(f"reading samples from file {sample_fn}. ") # read da = self.data_catalog.get_rasterdataset( sample_fn, @@ -2206,7 +2186,7 @@ def setup_mesh2d_refine( ) # float64 is needed by mesh kernel to convert into c double # reproject if da.raster.crs != self.crs: - self.logger.warning( + logger.warning( "Sample grid has a different resolution than model." "Reprojecting with nearest but some information might be lost." 
) @@ -2219,7 +2199,6 @@ def setup_mesh2d_refine( gdf_polygon=gdf if polygon_fn is not None else None, da_sample=da if sample_fn is not None else None, steps=steps, - logger=self.logger, ) # set mesh2d @@ -2290,7 +2269,7 @@ def setup_link1d2d( """ # check existing network if "mesh1d" not in self.mesh_names or "mesh2d" not in self.mesh_names: - self.logger.error( + logger.error( "cannot setup link1d2d: either mesh1d or mesh2d or both do not exist" ) return None @@ -2303,7 +2282,7 @@ def setup_link1d2d( # check input if polygon_fn is not None: within = self.data_catalog.get_geodataframe(polygon_fn).geometry - self.logger.info(f"adding 1d2d links only within polygon {polygon_fn}") + logger.info(f"adding 1d2d links only within polygon {polygon_fn}") else: within = None @@ -2311,16 +2290,16 @@ def setup_link1d2d( branchids = self.branches[ self.branches.branchtype == branch_type ].branchid.to_list() # use selective branches - self.logger.info(f"adding 1d2d links for {branch_type} branches.") + logger.info(f"adding 1d2d links for {branch_type} branches.") else: branchids = None # use all branches - self.logger.warning( + logger.warning( "adding 1d2d links for all branches at non boundary locations." 
) # setup 1d2d links if link_direction == "1d_to_2d": - self.logger.info("setting up 1d_to_2d links.") + logger.info("setting up 1d_to_2d links.") # recompute max_length based on the diagonal distance of the max mesh area max_length = np.sqrt(self.mesh_grids["mesh2d"].area.max()) * np.sqrt(2) link1d2d = workflows.links1d2d_add_links_1d_to_2d( @@ -2329,13 +2308,13 @@ def setup_link1d2d( elif link_direction == "2d_to_1d": if link_type == "embedded": - self.logger.info("setting up 2d_to_1d embedded links.") + logger.info("setting up 2d_to_1d embedded links.") link1d2d = workflows.links1d2d_add_links_2d_to_1d_embedded( self.mesh, branchids=branchids, within=within ) elif link_type == "lateral": - self.logger.info("setting up 2d_to_1d lateral links.") + logger.info("setting up 2d_to_1d lateral links.") link1d2d = workflows.links1d2d_add_links_2d_to_1d_lateral( self.mesh, branchids=branchids, @@ -2344,14 +2323,14 @@ def setup_link1d2d( dist_factor=dist_factor, ) else: - self.logger.error(f"link_type {link_type} is not recognised.") + logger.error(f"link_type {link_type} is not recognised.") else: - self.logger.error(f"link_direction {link_direction} is not recognised.") + logger.error(f"link_direction {link_direction} is not recognised.") # Add link1d2d to xu Ugrid mesh if len(link1d2d["link1d2d"]) == 0: - self.logger.warning("No 1d2d links were generated.") + logger.warning("No 1d2d links were generated.") else: self.set_link1d2d(link1d2d) @@ -2412,7 +2391,7 @@ def setup_maps_from_rasterdataset( """ # check for name when split_dataset is False if split_dataset is False and name is None: - self.logger.error("name must be specified when split_dataset = False") + logger.error("name must be specified when split_dataset = False") # Call super method variables = super().setup_maps_from_rasterdataset( @@ -2521,7 +2500,7 @@ def setup_maps_from_raster_reclass( """ # check for name when split_dataset is False if split_dataset is False and name is None: - self.logger.error("name 
must be specified when split_dataset = False") + logger.error("name must be specified when split_dataset = False") # Call super method reclass_variables = super().setup_maps_from_raster_reclass( @@ -2645,7 +2624,7 @@ def setup_2dboundary( ``boundaries_timeseries_fn``. """ - self.logger.info("Preparing 2D boundaries.") + logger.info("Preparing 2D boundaries.") if boundary_type == "waterlevel": boundary_unit = "m" @@ -2675,7 +2654,7 @@ def setup_2dboundary( predicate="contains", ) if len(gdf_bnd) == 0: - self.logger.error( + logger.error( "Boundaries are not found. Check if the boundary are outside of" "recognisable boundary region (cell size * tolerance to the mesh)." ) @@ -2692,7 +2671,7 @@ def setup_2dboundary( gdf_bnd = None # 2. read timeseries boundaries if boundaries_timeseries_fn is not None: - self.logger.info("reading timeseries boundaries") + logger.info("reading timeseries boundaries") df_bnd = self.data_catalog.get_dataframe( boundaries_timeseries_fn, time_tuple=(tstart, tstop) ) # could not use open_geodataset due to line geometry @@ -2736,7 +2715,6 @@ def setup_2dboundary( boundary_value=boundary_value, boundary_type=boundary_type, boundary_unit=boundary_unit, - logger=self.logger, ) # 5. set boundaries @@ -2762,7 +2740,7 @@ def setup_rainfall_from_constant( constant_value: float Constant value for the rainfall_rate timeseries in mm/day. """ - self.logger.info("Preparing rainfall meteo forcing from uniform timeseries.") + logger.info("Preparing rainfall meteo forcing from uniform timeseries.") refdate, tstart, tstop = self.get_model_time() # time slice meteo_location = ( @@ -2785,7 +2763,6 @@ def setup_rainfall_from_constant( fill_value=constant_value, is_rate=True, meteo_location=meteo_location, - logger=self.logger, ) # 4. set meteo forcing @@ -2835,7 +2812,7 @@ def setup_rainfall_from_uniform_timeseries( Note that Delft3DFM 1D2D Suite 2022.04 supports only "rainfall_rate". 
""" - self.logger.info("Preparing rainfall meteo forcing from uniform timeseries.") + logger.info("Preparing rainfall meteo forcing from uniform timeseries.") refdate, tstart, tstop = self.get_model_time() # time slice meteo_location = ( @@ -2855,7 +2832,7 @@ def setup_rainfall_from_uniform_timeseries( "function arguments (eg pandas.read_csv for csv driver)." ) if (df_meteo.index[-1] - df_meteo.index[0]) < (tstop - tstart): - self.logger.warning( + logger.warning( "Time in meteo_timeseries_fn were shorter than model simulation time. " "Will fill in using fill_value." ) @@ -2870,7 +2847,6 @@ def setup_rainfall_from_uniform_timeseries( fill_value=fill_value, is_rate=is_rate, meteo_location=meteo_location, - logger=self.logger, ) # 4. set meteo forcing @@ -2886,7 +2862,7 @@ def read(self): # FIXME: where to read crs?. """ - self.logger.info(f"Reading model data from {self.root}") + logger.info(f"Reading model data from {self.root}") self.read_dimr() self.read_config() self.read_mesh() @@ -2897,10 +2873,10 @@ def read(self): def write(self): # complete model """Write the complete model schematization and configuration to file.""" - self.logger.info(f"Writing model data to {self.root}") + logger.info(f"Writing model data to {self.root}") # if in r, r+ mode, only write updated components if not self._write: - self.logger.warning("Cannot write in read-only mode") + logger.warning("Cannot write in read-only mode") return if self._maps: @@ -3029,14 +3005,14 @@ def read_maps(self) -> Dict[str, Union[xr.Dataset, xr.DataArray]]: def write_maps(self) -> None: """Write maps as tif files in maps folder and update initial fields.""" if len(self._maps) == 0: - self.logger.debug("No maps data found, skip writing.") + logger.debug("No maps data found, skip writing.") return self._assert_write_mode() # Global parameters mapsroot = join(self.root, "maps") inilist = [] paramlist = [] - self.logger.info(f"Writing maps files to {mapsroot}") + logger.info(f"Writing maps files to 
{mapsroot}") def _prepare_inifields(da_dict, da): # Write tif files @@ -3139,17 +3115,17 @@ def read_geoms(self) -> None: # FIXME: gives an error when only 2D model. # Add crosssections properties, should be done before friction # Branches are needed do derive locations, # self.branches should start the read if not done yet - self.logger.info("Reading cross-sections files") + logger.info("Reading cross-sections files") crosssections = utils.read_crosssections(self.branches, self.dfmmodel) # Add friction properties from roughness files - # self.logger.info("Reading friction files") + # logger.info("Reading friction files") crosssections = utils.read_friction(crosssections, self.dfmmodel) self.set_geoms(crosssections, "crosssections") # Read manholes if self.dfmmodel.geometry.storagenodefile is not None: - self.logger.info("Reading manholes file") + logger.info("Reading manholes file") network1d_nodes = mesh_utils.network1d_nodes_geodataframe( self.mesh_datasets["network1d"] ) @@ -3158,7 +3134,7 @@ def read_geoms(self) -> None: # FIXME: gives an error when only 2D model. 
# Read structures if self.dfmmodel.geometry.structurefile is not None: - self.logger.info("Reading structures file") + logger.info("Reading structures file") structures = utils.read_structures(self.branches, self.dfmmodel) for st_type in structures["type"].unique(): self.set_geoms(structures[structures["type"] == st_type], f"{st_type}s") @@ -3183,20 +3159,20 @@ def write_geoms(self, write_mesh_gdf=True) -> None: if "crosssections" in self._geoms: # Crosssections gdf_crs = self.geoms["crosssections"] - self.logger.info("Writting cross-sections files crsdef and crsloc") + logger.info("Writting cross-sections files crsdef and crsloc") crsdef_fn, crsloc_fn = utils.write_crosssections(gdf_crs, savedir) self.set_config("geometry.crossdeffile", crsdef_fn) self.set_config("geometry.crosslocfile", crsloc_fn) # Friction - self.logger.info("Writting friction file(s)") + logger.info("Writting friction file(s)") friction_fns = utils.write_friction(gdf_crs, savedir) self.set_config("geometry.frictfile", ";".join(friction_fns)) # Write structures # Manholes if "manholes" in self._geoms: - self.logger.info("Writting manholes file.") + logger.info("Writting manholes file.") storage_fn = utils.write_manholes( self.geoms["manholes"], savedir, @@ -3213,7 +3189,7 @@ def write_geoms(self, write_mesh_gdf=True) -> None: structures = list(itertools.chain.from_iterable(structures)) structures = pd.DataFrame(structures).replace(np.nan, None) # write - self.logger.info("Writting structures file.") + logger.info("Writting structures file.") structures_fn = utils.write_structures( structures, savedir, @@ -3308,10 +3284,10 @@ def read_forcing( def write_forcing(self) -> None: """Write forcing into hydrolib-core ext and forcing models.""" if len(self._forcing) == 0: - self.logger.debug("No forcing data found, skip writing.") + logger.debug("No forcing data found, skip writing.") else: self._assert_write_mode() - self.logger.info("Writting forcing files.") + logger.info("Writting forcing files.") 
savedir = dirname(join(self.root, self._config_fn)) # create new external forcing file ext_fn = "bnd.ext" @@ -3361,7 +3337,7 @@ def read_mesh(self): # https://github.com/Deltares/HYDROLIB-core/issues/561 # Add branchtype, properties from branches.gui file - self.logger.info("Reading branches GUI file") + logger.info("Reading branches GUI file") branches = utils.read_branches_gui(branches, self.dfmmodel) # Set branches @@ -3392,7 +3368,7 @@ def write_mesh(self, write_gui=True): # other mesh1d related geometry TODO update if "mesh1d" in self.mesh_names and write_gui: - self.logger.info("Writting branches.gui file") + logger.info("Writting branches.gui file") if "manholes" in self.geoms: utils.write_branches_gui(self.branches, savedir) @@ -3474,11 +3450,11 @@ def init_dfmmodel(self): # create a new MDU-Model mdu_fn = Path(join(self.root, self._config_fn)) if isfile(mdu_fn) and self._read: - self.logger.info(f"Reading mdu file at {mdu_fn}") + logger.info(f"Reading mdu file at {mdu_fn}") self._dfmmodel = FMModel(filepath=mdu_fn) else: # use hydrolib template self._assert_write_mode() - self.logger.info("Initialising empty mdu file") + logger.info("Initialising empty mdu file") self._dfmmodel = FMModel() self._dfmmodel.filepath = mdu_fn @@ -3495,12 +3471,12 @@ def read_dimr(self, dimr_fn: Optional[str] = None) -> None: dimr_fn = join(self.root, self._dimr_fn) # if file exist, read if isfile(dimr_fn) and self._read: - self.logger.info(f"Reading dimr file at {dimr_fn}") + logger.info(f"Reading dimr file at {dimr_fn}") dimr = DIMR(filepath=Path(dimr_fn)) # else initialise else: self._assert_write_mode() - self.logger.info("Initialising empty dimr file") + logger.info("Initialising empty dimr file") dimr = DIMR() self._dimr = dimr @@ -3518,7 +3494,7 @@ def write_dimr(self, dimr_fn: Optional[str] = None): if not self._read: # Updates the dimr file first before writing - self.logger.info("Adding dflowfm component to dimr config") + logger.info("Adding dflowfm component to dimr 
config") # update component components = self._dimr.component @@ -3541,7 +3517,7 @@ def write_dimr(self, dimr_fn: Optional[str] = None): self._dimr.control = control # write - self.logger.info(f"Writing model dimr file to {self._dimr.filepath}") + logger.info(f"Writing model dimr file to {self._dimr.filepath}") self.dimr.save(recurse=False) @property @@ -3563,7 +3539,7 @@ def set_branches(self, branches: gpd.GeoDataFrame): if "branchtype" in branches.columns: self._branches = branches else: - self.logger.error( + logger.error( "'branchtype' column absent from the new branches, could not update." ) @@ -3573,10 +3549,10 @@ def set_branches(self, branches: gpd.GeoDataFrame): _ = self.set_branches_component(name="pipe") # update geom - self.logger.debug("Adding branches vector to geoms.") + logger.debug("Adding branches vector to geoms.") self.set_geoms(branches, "branches") - self.logger.debug("Updating branches in network.") + logger.debug("Updating branches in network.") def set_branches_component(self, name: str): """Extract component name from branches and add it to geoms.""" @@ -3705,7 +3681,7 @@ def set_mesh( if overwrite_grid and "link1d2d" in self.mesh.data_vars: if grid_name == "mesh1d" or grid_name == "mesh2d": # TODO check if warning is enough or if we should remove to be sure? - self.logger.warning( + logger.warning( f"{grid_name} grid was updated in self.mesh. " "Re-run setup_link1d2d method to update the model 1D2D links." ) @@ -3742,7 +3718,7 @@ def set_link1d2d( # FIXME current implementation of below does not support updating partial # 1d2d links. Either document or adapt. 
#1 if "link1d2d" in self.mesh.data_vars: - self.logger.info("Overwriting existing link1d2d in self.mesh.") + logger.info("Overwriting existing link1d2d in self.mesh.") self._mesh = self._mesh.drop_vars( [ "link1d2d", @@ -3773,7 +3749,7 @@ def _check_crs(self): """Check if model crs is defined.""" if self.crs is None: if self.read: - self.logger.warning( + logger.warning( "Could not derive CRS from reading the mesh file." "Please define the CRS in the [global] init attributes before" "setting up the model." @@ -3784,4 +3760,4 @@ def _check_crs(self): "attributes before setting up the model." ) else: - self.logger.info(f"project crs: {self.crs.to_epsg()}") + logger.info(f"project crs: {self.crs.to_epsg()}") diff --git a/hydromt_delft3dfm/workflows/boundaries.py b/hydromt_delft3dfm/workflows/boundaries.py index e34a097..47c11e4 100644 --- a/hydromt_delft3dfm/workflows/boundaries.py +++ b/hydromt_delft3dfm/workflows/boundaries.py @@ -63,7 +63,6 @@ def select_boundary_type( branch_type: str, boundary_type: str, boundary_locs: str, - logger=logger, ) -> pd.DataFrame: """Select boundary location per branch type and boundary type. @@ -78,8 +77,6 @@ def select_boundary_type( For pipes 'waterlevel' is supported. boundary_locs : {'both', 'upstream', 'downstream'} The boundary location to use. - logger - The logger to log messages with. Returns ------- @@ -165,7 +162,6 @@ def compute_boundary_values( boundary_type: str = "waterlevel", boundary_unit: str = "m", snap_offset: float = 0.1, - logger=logger, ): """ Compute 1d boundary values. @@ -197,8 +193,6 @@ def compute_boundary_values( Snapping tolerance to automatically applying boundaries at the correct network nodes. By default 0.1, a small snapping is applied to avoid precision errors. - logger - Logger to log messages. 
""" # Timeseries boundary values if da_bnd is not None: @@ -280,7 +274,6 @@ def compute_2dboundary_values( boundary_value: float = 0.0, boundary_type: str = "waterlevel", boundary_unit: str = "m", - logger=logger, ): """ Compute 2d boundary timeseries. @@ -313,8 +306,6 @@ def compute_2dboundary_values( if ''boundary_type`` = "discharge": Allowed unit is [m3/s] By default m. - logger : - Logger to log messages. Raises ------ @@ -471,7 +462,6 @@ def compute_meteo_forcings( fill_value: float = 0.0, is_rate: bool = True, meteo_location: tuple = None, - logger=logger, ) -> xr.DataArray: """ Compute meteo forcings. @@ -493,8 +483,6 @@ def compute_meteo_forcings( If rate, unit is expected to be in mm/day and else mm. meteo_location : tuple Global location for meteo timeseries - logger - Logger to log messages. Returns ------- @@ -592,7 +580,6 @@ def compute_forcing_values_points( forcing_value: float = 0.0, forcing_type: str = "lateral_discharge", forcing_unit: str = "m3/s", - logger=logger, ): """ Compute 1d forcing values. @@ -622,8 +609,6 @@ def compute_forcing_values_points( forcing_unit : {'m3/s'} Unit corresponding to ``forcing_type``. By default 'm3/s' - logger - Logger to log messages. """ # TODO: harmonize for other point forcing #21 # first process data based on either timeseries or constant @@ -744,7 +729,6 @@ def compute_forcing_values_polygon( forcing_value: float = 0.0, forcing_type: str = "waterlevelbnd", forcing_unit: str = "m", - logger=logger, ): """ Compute 1d forcing values. @@ -774,8 +758,6 @@ def compute_forcing_values_polygon( forcing_unit : {'m3/s'} Unit corresponding to ``forcing_type``. By default 'm3/s' - logger - Logger to log messages. 
""" # default dims, coords and attris for polygon geometry type _dims_defaults = ["index", "numcoordinates"] diff --git a/hydromt_delft3dfm/workflows/branches.py b/hydromt_delft3dfm/workflows/branches.py index 1d0cf30..1afd378 100644 --- a/hydromt_delft3dfm/workflows/branches.py +++ b/hydromt_delft3dfm/workflows/branches.py @@ -131,7 +131,6 @@ def prepare_branches( snap_offset=snap_offset, allow_intersection_snapping=allow_intersection_snapping, smooth_branches=br_type == "pipe", - logger=logger, ) logger.info("Validating branches") validate_branches(branches) @@ -278,7 +277,6 @@ def update_data_columns_attribute_from_query( branches: gpd.GeoDataFrame, attribute: pd.DataFrame, attribute_name: str, - logger=logger, ): """ Update an attribute column of branches. @@ -360,7 +358,6 @@ def process_branches( snap_offset: float = 0.01, allow_intersection_snapping: bool = True, smooth_branches: bool = False, - logger=logger, ): """Process the branches. @@ -381,8 +378,6 @@ def process_branches( smooth_branches: bool, optional whether to return branches that are smoothed (straightend), needed for pipes Default to False. - logger - The logger to log messages with. 
Returns ------- @@ -400,16 +395,15 @@ def process_branches( id_col=id_col, snap_offset=snap_offset, allow_intersection_snapping=allow_intersection_snapping, - logger=logger, ) logger.debug("Splitting branches based on spacing") # TODO: add check, if spacing is used, # then in branch cross section cannot be setup later - branches = space_branches(branches, smooth_branches=smooth_branches, logger=logger) + branches = space_branches(branches, smooth_branches=smooth_branches) logger.debug("Generating branchnodes") - branch_nodes = generate_branchnodes(branches, id_col, logger=logger) + branch_nodes = generate_branchnodes(branches, id_col) return branches, branch_nodes @@ -419,7 +413,6 @@ def cleanup_branches( id_col: str = "branchid", snap_offset: float = 0.01, allow_intersection_snapping: bool = True, - logger=logger, ): """Clean up the branches. @@ -445,8 +438,6 @@ def cleanup_branches( allow_intersection_snapping : bool, optional Allow snapping at all branch ends, including intersections. Defaults to True. - logger - The logger to log messages with. Returns ------- @@ -559,7 +550,6 @@ def space_branches( branches: gpd.GeoDataFrame, spacing_col: str = "spacing", smooth_branches: bool = False, - logger=logger, ): """ Space the branches based on the spacing_col on the branch. @@ -575,8 +565,6 @@ def space_branches( The branches to clean up. spacing_col : str, optional The branch id column name. Defaults to 'spacing'. - logger - The logger to log messages with. Returns ------- @@ -598,7 +586,6 @@ def space_branches( def generate_branchnodes( branches: gpd.GeoDataFrame, id_col: str = None, - logger=logger, ): """Generate branch nodes at the branch ends. @@ -608,8 +595,6 @@ def generate_branchnodes( The branches to generate the end nodes for. id_col : str, optional The branch id column name. Defaults to None. - logger - The logger to log messages with. 
Returns ------- @@ -652,9 +637,8 @@ def generate_branchnodes( return nodes -def validate_branches( - branches: gpd.GeoDataFrame, logger=logger -): # TODO: add more content and maybe make a seperate module +# TODO: add more content and maybe make a seperate module +def validate_branches(branches: gpd.GeoDataFrame): """Validate the branches. Logs an error when one or more branches have a length of 0 meter. @@ -663,8 +647,6 @@ def validate_branches( ---------- branches : gpd.GeoDataFrame The branches to validate. - logger - The logger to log messages with. """ # validate pipe geometry if sum(branches.geometry.length <= 0) == 0: @@ -683,7 +665,6 @@ def split_branches( spacing_const: float = float("inf"), spacing_col: str = None, smooth_branches: bool = False, - logger=logger, ): """ Split branches based on a given spacing. @@ -707,8 +688,6 @@ def split_branches( Default to None. smooth_branches: bool, optional Switch to split branches into straight lines. By default False. - logger - The logger to log messages with. Returns ------- diff --git a/hydromt_delft3dfm/workflows/dem.py b/hydromt_delft3dfm/workflows/dem.py index 9a0b33d..d79a7af 100644 --- a/hydromt_delft3dfm/workflows/dem.py +++ b/hydromt_delft3dfm/workflows/dem.py @@ -165,7 +165,6 @@ def get_river_bathymetry( elevtn_name: str = "elevtn", uparea_name: str = "uparea", rivmsk_name: str = "rivmsk", - logger=logger, **kwargs, ) -> Tuple[gpd.GeoDataFrame, xr.DataArray]: """Estimate river bedlevel zb. 
diff --git a/hydromt_delft3dfm/workflows/region.py b/hydromt_delft3dfm/workflows/region.py index b4aa768..0d67aac 100644 --- a/hydromt_delft3dfm/workflows/region.py +++ b/hydromt_delft3dfm/workflows/region.py @@ -20,7 +20,7 @@ def parse_region_geometry( logger: logging.Logger = logger, ): """Parse hydromt stype region argument into region geometry.""" - kind, region = parse_region_geom(region, logger=logger) + kind, region = parse_region_geom(region) if kind == "bbox": bbox = region["bbox"] geom = gpd.GeoDataFrame(geometry=[box(*bbox)], crs=4326) diff --git a/hydromt_delft3dfm/workflows/structures.py b/hydromt_delft3dfm/workflows/structures.py index 5d4ee24..2462328 100644 --- a/hydromt_delft3dfm/workflows/structures.py +++ b/hydromt_delft3dfm/workflows/structures.py @@ -11,7 +11,7 @@ from .branches import find_nearest_branch from .crosssections import set_point_crosssections -logger = logging.getLogger(__name__) +logger = logging.getLogger("hydromt") __all__ = [ @@ -27,7 +27,6 @@ def prepare_1dstructures( id_start: int = 1, filter: str = None, snap_offset: float = 0.0, - logger: logging.Logger = logger, ) -> gpd.GeoDataFrame: """Prepare 1D structures from geodataframe. 
diff --git a/tests/test_hydromt.py b/tests/test_hydromt.py index 3efab90..56e4c22 100644 --- a/tests/test_hydromt.py +++ b/tests/test_hydromt.py @@ -1,11 +1,11 @@ """Test for hydromt plugin model class DFlowFMModel""" +import logging import pdb from os.path import abspath, dirname, join import pytest from hydromt.cli._utils import parse_config -# from hydromt._utils.log import setuplog from hydromt_delft3dfm import DFlowFMModel @@ -52,7 +52,8 @@ def test_model_build(tmpdir, modelname): # test build method # compare results with model from examples folder root = join(tmpdir, f"dflowfm_{modelname}") - # logger = setuplog(__name__, join(root, "hydromt.log"), log_level=10) + logger = logging.getLogger("hydromt") + logger.setLevel(10) mod1 = DFlowFMModel( root=root, mode="w", @@ -60,7 +61,6 @@ network_snap_offset=network_snap_offset, crs=crs, openwater_computation_node_distance=openwater_computation_node_distance, - # logger=logger, ) # Build model (now excludes global section because of pop) mod1.build(opt=opt) @@ -70,7 +70,7 @@ # Compare with model from examples folder # (need to read it again for proper geoms check) - mod1 = DFlowFMModel(root=root, mode="r") #, logger=logger) + mod1 = DFlowFMModel(root=root, mode="r") mod1.read() root = join(EXAMPLEDIR, f"dflowfm_{modelname}") mod0 = DFlowFMModel(root=root, mode="r") @@ -102,7 +102,8 @@ def test_model_build_local_code(tmp_path): network_snap_offset = global_sect['network_snap_offset'] openwater_computation_node_distance = global_sect['openwater_computation_node_distance'] # initialize model - # logger = setuplog(__name__, join(tmp_path, "hydromt.log"), log_level=10) + logger = logging.getLogger("hydromt") + logger.setLevel(10) model = DFlowFMModel( root=tmp_path, mode="w", @@ -110,7 +111,6 @@ network_snap_offset=network_snap_offset, crs=crs, 
openwater_computation_node_distance=openwater_computation_node_distance, - # logger=logger ) # build model via steps corresponding to yml order model.setup_rivers(**opt['setup_rivers']) @@ -146,7 +146,8 @@ def test_model_build_piave_code(tmp_path): network_snap_offset = global_sect['network_snap_offset'] openwater_computation_node_distance = global_sect['openwater_computation_node_distance'] # initialize model - # logger = setuplog(__name__, join(tmp_path, "hydromt.log"), log_level=10) + logger = logging.getLogger("hydromt") + logger.setLevel(10) model = DFlowFMModel( root=tmp_path, mode="w", @@ -154,7 +155,6 @@ def test_model_build_piave_code(tmp_path): network_snap_offset=network_snap_offset, crs=crs, openwater_computation_node_distance=openwater_computation_node_distance, - # logger=logger ) # build model via steps corresponding to yml order model.setup_rivers_from_dem(**opt['setup_rivers_from_dem'])