diff --git a/notebooks/02_add_peilen.py b/notebooks/02_add_peilen.py
new file mode 100644
index 0000000..009f64e
--- /dev/null
+++ b/notebooks/02_add_peilen.py
@@ -0,0 +1,99 @@
+# %%
+# This script defines the target level (streefpeil) of each basin.
+# The level is taken from the lowest Gemiddeld zomerpeil (GPGZMRPL) of the peilgebieden located at the basin's downstream structures.
+
+
+from ribasim_nl import CloudStorage, Model
+from ribasim_nl.streefpeilen import add_streefpeil
+
+cloud = CloudStorage()
+
+# Parameters per water authority: (authority, short_name, peilgebieden_path, layername, targetlevel, code, modelpath)
+params_list = [
+    (
+        "HunzeenAas",
+        "hea",
+        "verwerkt/1_ontvangen_data/peilgebieden.gpkg",
+        None,
+        "gpgzmrpl",
+        "gpgident",
+        "HunzeenAas_fix_model_network",
+    ),
+    (
+        "DrentsOverijsselseDelta",
+        "dod",
+        "verwerkt/1_ontvangen_data/extra data/Peilgebieden/Peilgebieden.shp",
+        None,
+        "GPGZMRPL",
+        "GPGIDENT",
+        "DrentsOverijsselseDelta_fix_model_network",
+    ),
+    (
+        "AaenMaas",
+        "aam",
+        "verwerkt/downloads/WS_PEILGEBIEDPolygon.shp",
+        None,
+        "ZOMERPEIL",
+        "CODE",
+        "AaenMaas_fix_model_area",
+    ),
+    (
+        "BrabantseDelta",
+        "wbd",
+        "verwerkt/4_ribasim/hydamo.gpkg",
+        "peilgebiedpraktijk",
+        "WS_ZOMERPEIL",
+        "CODE",
+        "BrabantseDelta_fix_model_area",
+    ),
+    (
+        "StichtseRijnlanden",
+        "hdsr",
+        "verwerkt/4_ribasim/peilgebieden.gpkg",
+        None,
+        "WS_ZP",
+        "WS_PGID",
+        "StichtseRijnlanden_fix_model_area",
+    ),
+    (
+        "ValleienVeluwe",
+        "venv",
+        "verwerkt/1_ontvangen_data/Eerste_levering/vallei_en_veluwe.gpkg",
+        "peilgebiedpraktijk",
+        "ws_max_peil",
+        "code",
+        "ValleienVeluwe_fix_model_area",
+    ),
+    (
+        "Vechtstromen",
+        "vechtstromen",
+        "verwerkt/downloads/peilgebieden_voormalig_velt_en_vecht.gpkg",
+        None,
+        "GPGZMRPL",
+        "GPGIDENT",
+        "Vechtstromen_fix_model_area",
+    ),
+]
+
+# Run the workflow for every water authority
+if __name__ == "__main__":
+    for params in params_list:
+        # parse params
+        authority, short_name, peilgebieden_path, layername, targetlevel, code, modelpath = params
+        # parse paths
+        ribasim_dir = cloud.joinpath(authority, "modellen", modelpath)
+        ribasim_toml = ribasim_dir / f"{short_name}.toml"
+        peilgebieden_path = cloud.joinpath(authority, peilgebieden_path)
+
+        # sync paths
+        cloud.synchronize(filepaths=[peilgebieden_path])
+        cloud.synchronize(filepaths=[ribasim_dir], check_on_remote=False)  # local model is not on remote
+
+        # read model
+        model = Model.read(ribasim_toml)
+
+        # add streefpeil
+        model = add_streefpeil(model, peilgebieden_path, layername, targetlevel, code)
+
+        # write model
+        model.write(ribasim_toml)
diff --git a/notebooks/noorderzijlvest/01_fix_model.py b/notebooks/noorderzijlvest/01_fix_model.py
index 21c7d8a..e6847b4 100644
--- a/notebooks/noorderzijlvest/01_fix_model.py
+++ b/notebooks/noorderzijlvest/01_fix_model.py
@@ -3,7 +3,6 @@
 import geopandas as gpd
 import pandas as pd
-import requests
 from networkx import all_shortest_paths, shortest_path
 from ribasim.nodes import basin, level_boundary, manning_resistance, outlet, pump
 from shapely.geometry import MultiLineString
@@ -26,25 +25,7 @@
 ribasim_dir = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3")
 
-for path in (he_shp, he_snap_shp, lines_shp, model_edits_path, ribasim_dir):
-    url = cloud.joinurl(*path.relative_to(cloud.data_dir).parts)
-    # check if file exists on remote, if not raise for status
-    r = requests.head(url, auth=cloud.auth)
-    r.raise_for_status()
-
-    # check if file exists local, if not download
-    if not path.exists():
-        print(f"download data for {path}")
-
-        if path.suffix == ".shp":  # with shapes we are to download the parent
-            path = path.parent
-            url = cloud.joinurl(*path.relative_to(cloud.data_dir).parts)
-
-        if cloud.content(url):
-            cloud.download_content(url)
-        else:
-            cloud.download_file(url)
-
+cloud.synchronize(filepaths=(he_shp, he_snap_shp, lines_shp, model_edits_path, ribasim_dir))
 ribasim_toml = ribasim_dir / "model.toml"
 
 # %% read model
diff --git a/src/ribasim_nl/ribasim_nl/cloud.py b/src/ribasim_nl/ribasim_nl/cloud.py
index aa40905..3c31905 100644
--- a/src/ribasim_nl/ribasim_nl/cloud.py
+++ b/src/ribasim_nl/ribasim_nl/cloud.py
@@ -432,3 +432,25 @@ def upload_model(self, authority: str, model: str, include_results=False, includ
             self.upload_content(model_version_dir)
 
         return ModelVersion(model, today.year, today.month, revision)
+
+    def synchronize(self, filepaths: list[Path], check_on_remote: bool = True):
+        for path in filepaths:
+            path = Path(path)
+            url = self.joinurl(*path.relative_to(self.data_dir).parts)
+            # check that the file exists on the remote; raise if it does not
+            if check_on_remote:
+                r = requests.head(url, auth=self.auth)
+                r.raise_for_status()
+
+            # if the file does not exist locally, download it
+            if not path.exists():
+                print(f"download data for {path}")
+
+                if path.suffix == ".shp":  # a shapefile has sidecar files, so download its parent directory
+                    path = path.parent
+                    url = self.joinurl(*path.relative_to(self.data_dir).parts)
+
+                if self.content(url):
+                    self.download_content(url)
+                else:
+                    self.download_file(url)
diff --git a/src/ribasim_nl/ribasim_nl/streefpeilen.py b/src/ribasim_nl/ribasim_nl/streefpeilen.py
new file mode 100644
index 0000000..3b8412c
--- /dev/null
+++ b/src/ribasim_nl/ribasim_nl/streefpeilen.py
@@ -0,0 +1,91 @@
+from pathlib import Path
+
+import geopandas as gpd
+import numpy as np
+import pandas as pd
+
+from ribasim_nl import Model
+
+
+def add_streefpeil(model: Model, peilgebieden_path: Path, layername: str, target_level: str, code: str):
+    authority = model.filepath.parents[2].name
+    print(authority)
+    # input files are synchronized by the caller (see notebooks/02_add_peilen.py)
+
+    # if no layer name is provided, default to the first layer in the file
+    if layername is None:
+        layername = gpd.list_layers(peilgebieden_path).at[0, "name"]
+    peilgebieden = gpd.read_file(peilgebieden_path, layer=layername, fid_as_index=True)
+
+    # Explode peilgebieden geometries to handle multiple parts of MultiPolygons
+    peilgebieden = peilgebieden.explode()
+    peilgebieden[target_level] = pd.to_numeric(peilgebieden[target_level], errors="coerce")
+    peilgebieden = peilgebieden.dropna(subset=[target_level])
+
+    # Ensure 'meta_code_waterbeheerder' exists in the dataframe as string
+    if "meta_code_waterbeheerder" not in model.basin.area.df.columns:
+        model.basin.area.df["meta_code_waterbeheerder"] = np.nan
+    model.basin.area.df["meta_code_waterbeheerder"] = model.basin.area.df["meta_code_waterbeheerder"].astype(str)
+
+    # Ensure 'meta_streefpeil' exists in the dataframe as float
+    if "meta_streefpeil" not in model.basin.area.df.columns:
+        model.basin.area.df["meta_streefpeil"] = np.nan
+    model.basin.area.df["meta_streefpeil"] = model.basin.area.df["meta_streefpeil"].astype(float)
+
+    # Prepare basin geometries
+    basins_gdf = gpd.GeoDataFrame(model.basin.area.df, geometry=model.basin.area.df["geometry"], crs=peilgebieden.crs)
+
+    # Iterate over basins to calculate meta_streefpeil
+    for basin_row in basins_gdf.itertuples():
+        node_id = basin_row.node_id  # Use node_id as the unique identifier for the basin
+        basin_geometry = basin_row.geometry
+        downstream_nodes = model.node_table().df.loc[model.downstream_node_id(node_id=node_id)]
+
+        # Create buffer of 100m around the downstream structures
+        buffer_100m_gdf = gpd.GeoDataFrame(
+            {"node_id": downstream_nodes.index, "geometry": downstream_nodes.geometry.buffer(100)}, crs=peilgebieden.crs
+        )
+        buffer_100m_gdf = buffer_100m_gdf.reset_index(drop=True)
+        buffer_union = buffer_100m_gdf.dissolve(by="node_id").geometry.union_all()
+
+        # Filter peilgebieden that intersect the buffered structures
+        overlapping_peilgebieden = peilgebieden[peilgebieden.geometry.intersects(buffer_union)]
+
+        # If no peilgebied intersects the buffers, fall back to peilgebieden intersecting the basin itself
+        if overlapping_peilgebieden.empty:
+            overlapping_peilgebieden = peilgebieden[peilgebieden.geometry.intersects(basin_geometry)]
+
+        current_basin = basins_gdf[basins_gdf["node_id"] == node_id].copy()
+
+        # Overlay the peilgebieden with the basin to get the linked areas
+        linked = gpd.overlay(overlapping_peilgebieden, current_basin, how="intersection", keep_geom_type=False)
+        current_basin.loc[:, "area"] = current_basin.geometry.area
+
+        # Calculate the intersection area and its fraction of the basin area
+        linked["intersection_area"] = linked.geometry.area
+        linked["intersection_area_fraction"] = linked["intersection_area"] / current_basin["area"].iloc[0]
+
+        # linked_filtered will hold the peilgebied selected for this basin
+        linked_filtered = None
+        # Take the minimum target level from the peilgebieden near a structure; the intersection
+        # must cover at least 2.5% of the basin area (fraction > 0.025)
+        if not linked.empty:
+            linked_filtered = (
+                linked.loc[linked["intersection_area_fraction"] > 0.025]
+                .dropna(subset=[target_level])
+                .sort_values(by=[target_level])
+                .drop_duplicates(keep="first")
+            )
+        # If nothing passes the area filter, take (for now) the first valid entry from linked
+        if linked_filtered is None or linked_filtered.empty:
+            linked_filtered = (
+                linked.dropna(subset=[target_level]).sort_values(by=[target_level]).drop_duplicates(keep="first")
+            )
+
+        if not linked_filtered.empty:
+            model.basin.area.df.loc[model.basin.area.df.node_id == node_id, ["meta_streefpeil"]] = (
+                linked_filtered.iloc[0][target_level]
+            )
+            model.basin.area.df.loc[model.basin.area.df.node_id == node_id, ["meta_code_waterbeheerder"]] = (
+                linked_filtered.iloc[0][code]
+            )
+    return model
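
Usage sketch: the snippet below shows how the new CloudStorage.synchronize helper and add_streefpeil fit together for a single authority. It mirrors the HunzeenAas entry from params_list in notebooks/02_add_peilen.py; the paths, TOML name and column names are taken from that entry, and the local cloud directory layout is assumed to match that script.

# Minimal sketch, parameters taken from the HunzeenAas entry in params_list
from ribasim_nl import CloudStorage, Model
from ribasim_nl.streefpeilen import add_streefpeil

cloud = CloudStorage()

# resolve local paths inside the cloud data directory
peilgebieden_path = cloud.joinpath("HunzeenAas", "verwerkt/1_ontvangen_data/peilgebieden.gpkg")
ribasim_dir = cloud.joinpath("HunzeenAas", "modellen", "HunzeenAas_fix_model_network")

# download whatever is missing; the fixed model only exists locally, so skip the remote check
cloud.synchronize(filepaths=[peilgebieden_path])
cloud.synchronize(filepaths=[ribasim_dir], check_on_remote=False)

# read the model, derive meta_streefpeil per basin and write the result back
model = Model.read(ribasim_dir / "hea.toml")
model = add_streefpeil(model, peilgebieden_path, layername=None, target_level="gpgzmrpl", code="gpgident")
model.write(ribasim_dir / "hea.toml")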