diff --git a/notebooks/drents_overijsselse_delta/00_get_model.py b/notebooks/drents_overijsselse_delta/00_get_model.py
index 2cf928b..912d599 100644
--- a/notebooks/drents_overijsselse_delta/00_get_model.py
+++ b/notebooks/drents_overijsselse_delta/00_get_model.py
@@ -12,4 +12,4 @@
 ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml")
 
 if ribasim_toml.exists():
-    ribasim_toml.rename(ribasim_toml.with_name(f"{short_name}.toml"))
+    ribasim_toml.replace(ribasim_toml.with_name(f"{short_name}.toml"))
diff --git a/notebooks/drents_overijsselse_delta/01_fix_model_network.py b/notebooks/drents_overijsselse_delta/01_fix_model_network.py
index 5ec802b..645d001 100644
--- a/notebooks/drents_overijsselse_delta/01_fix_model_network.py
+++ b/notebooks/drents_overijsselse_delta/01_fix_model_network.py
@@ -25,20 +25,22 @@
     layer="duikersifonhevel",
 )
 
-split_line_gdf = gpd.read_file(
-    cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True
-)
-
 # Load node edit data
 model_edits_url = cloud.joinurl(authority, "verwerkt", "model_edits.gpkg")
 model_edits_path = cloud.joinpath(authority, "verwerkt", "model_edits.gpkg")
 if not model_edits_path.exists():
     cloud.download_file(model_edits_url)
 
+# Load fix user data
+fix_user_data_url = cloud.joinurl(authority, "verwerkt", "fix_user_data.gpkg")
+fix_user_data_path = cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg")
+if not fix_user_data_path.exists():
+    cloud.download_file(fix_user_data_url)
+
+split_line_gdf = gpd.read_file(
+    fix_user_data_path, layer="split_basins", fid_as_index=True
+)
 
-# level_boundary_gdf = gpd.read_file(
-#     cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="level_boundary", fid_as_index=True
-# )
 
 # %% read model
 model = Model.read(ribasim_toml)
@@ -339,15 +341,16 @@
     "remove_basin_area",
     "split_basin",
     "merge_basins",
+    "add_basin",
     "update_node",
     "add_basin_area",
-    "add_basin",
     "update_basin_area",
     "redirect_edge",
     "reverse_edge",
     "deactivate_node",
     "move_node",
     "remove_node",
+    "connect_basins",
 ]
 
 actions = [i for i in actions if i in gpd.list_layers(model_edits_path).name.to_list()]
@@ -364,16 +367,16 @@
         kwargs = {k: v for k, v in row._asdict().items() if k in keywords}
         method(**kwargs)
+# remove unassigned basin area
+model.fix_unassigned_basin_area()
+model.remove_unassigned_basin_area()
 
-# %% Reset static tables
-
-# Reset static tables
 model = reset_static_tables(model)
-
 
 # %% write model
 model.use_validation = True
 model.write(ribasim_toml)
 
 model.invalid_topology_at_node().to_file(ribasim_toml.with_name("invalid_topology_at_connector_nodes.gpkg"))
-
+model.report_basin_area()
+model.report_internal_basins()
 # %%
diff --git a/src/ribasim_nl/ribasim_nl/model.py b/src/ribasim_nl/ribasim_nl/model.py
index fe47d8b..2a90279 100644
--- a/src/ribasim_nl/ribasim_nl/model.py
+++ b/src/ribasim_nl/ribasim_nl/model.py
@@ -785,17 +785,17 @@ def merge_basins(
 
        if node_id in self.basin.area.df.node_id.to_numpy():
            poly = self.basin.area.df.set_index("node_id").at[node_id, "geometry"]
+            if isinstance(poly, Polygon):
+                poly = MultiPolygon([poly])
+
            # if to_node_id has area we union both areas
-            if to_node_id in self.basin.area.df.node_id.to_numpy():
+            if len(self.basin.area.df.loc[self.basin.area.df.node_id == to_node_id]) == 1:
                poly = poly.union(self.basin.area.df.set_index("node_id").at[to_node_id, "geometry"])
-                if isinstance(poly, Polygon):
-                    poly = MultiPolygon([poly])
+
                self.basin.area.df.loc[self.basin.area.df.node_id == to_node_id, ["geometry"]] = poly
            # else we add a record to basin
            else:
-                if isinstance(poly, Polygon):
-                    poly = MultiPolygon([poly])
                self.basin.area.df.loc[self.basin.area.df.index.max() + 1] = {
                    "node_id": to_node_id,
                    "geometry": poly,