Skip to content

Commit

Permalink
Merge branch 'main' into repro-dommel
Browse files Browse the repository at this point in the history
  • Loading branch information
visr authored Dec 20, 2024
2 parents 93d2e7a + bc0aaea commit a737795
Show file tree
Hide file tree
Showing 9 changed files with 238 additions and 51 deletions.
2 changes: 1 addition & 1 deletion notebooks/drents_overijsselse_delta/00_get_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@

ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml")
if ribasim_toml.exists():
ribasim_toml.rename(ribasim_toml.with_name(f"{short_name}.toml"))
ribasim_toml.replace(ribasim_toml.with_name(f"{short_name}.toml"))
29 changes: 16 additions & 13 deletions notebooks/drents_overijsselse_delta/01_fix_model_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,20 +25,22 @@
layer="duikersifonhevel",
)

split_line_gdf = gpd.read_file(
cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True
)

# Load node edit data
model_edits_url = cloud.joinurl(authority, "verwerkt", "model_edits.gpkg")
model_edits_path = cloud.joinpath(authority, "verwerkt", "model_edits.gpkg")
if not model_edits_path.exists():
cloud.download_file(model_edits_url)

# Load user-data fix file
fix_user_data_url = cloud.joinurl(authority, "verwerkt", "fix_user_data.gpkg")
fix_user_data_path = cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg")
if not fix_user_data_path.exists():
cloud.download_file(fix_user_data_url)

split_line_gdf = gpd.read_file(
cloud.joinpath(authority, "verwerkt", fix_user_data_path), layer="split_basins", fid_as_index=True
)

# level_boundary_gdf = gpd.read_file(
# cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="level_boundary", fid_as_index=True
# )

# %% read model
model = Model.read(ribasim_toml)
Expand Down Expand Up @@ -339,15 +341,16 @@
"remove_basin_area",
"split_basin",
"merge_basins",
"add_basin",
"update_node",
"add_basin_area",
"add_basin",
"update_basin_area",
"redirect_edge",
"reverse_edge",
"deactivate_node",
"move_node",
"remove_node",
"connect_basins",
]

actions = [i for i in actions if i in gpd.list_layers(model_edits_path).name.to_list()]
Expand All @@ -364,16 +367,16 @@
kwargs = {k: v for k, v in row._asdict().items() if k in keywords}
method(**kwargs)

# remove unassigned basin area
model.fix_unassigned_basin_area()
model.remove_unassigned_basin_area()

# %% Reset static tables

# Reset static tables
model = reset_static_tables(model)

# %% write model
model.use_validation = True
model.write(ribasim_toml)

model.invalid_topology_at_node().to_file(ribasim_toml.with_name("invalid_topology_at_connector_nodes.gpkg"))

model.report_basin_area()
model.report_internal_basins()
# %%
54 changes: 52 additions & 2 deletions notebooks/hunze_en_aas/01_fix_model_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@
if not model_edits_path.exists():
cloud.download_file(model_edits_url)

# Load area file to fill basin area holes
ribasim_areas_path = cloud.joinpath(authority, "verwerkt", "4_ribasim", "areas.gpkg")
ribasim_areas_gdf = gpd.read_file(ribasim_areas_path, fid_as_index=True, layer="areas")


# %% some stuff we'll need again
manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1])
Expand Down Expand Up @@ -90,18 +94,26 @@

# Reset static tables
model = reset_static_tables(model)
# fix unassigned basin area
model.fix_unassigned_basin_area()
model.explode_basin_area()
# fix unassigned basin area
model.fix_unassigned_basin_area()

# %%

actions = [
"remove_basin_area",
"remove_node",
"add_basin",
"update_node",
"add_basin_area",
"update_basin_area",
"merge_basins",
"reverse_edge",
"redirect_edge",
"merge_basins",
"move_node",
"connect_basins",
"update_node",
"deactivate_node",
]
actions = [i for i in actions if i in gpd.list_layers(model_edits_path).name.to_list()]
Expand All @@ -117,9 +129,47 @@
method(**kwargs)


# %% Assign Ribasim model IDs (dissolved areas) to the model basin areas (original areas with code) by overlaying the Ribasim area file, based on the largest overlap;
# then assign Ribasim node IDs to areas with the same area code. Many nodata areas disappear with this method.
# Create the overlay of areas
combined_basin_areas_gdf = gpd.overlay(ribasim_areas_gdf, model.basin.area.df, how="union").explode()
combined_basin_areas_gdf["geometry"] = combined_basin_areas_gdf["geometry"].apply(lambda x: x if x.has_z else x)

# Calculate area for each geometry
combined_basin_areas_gdf["area"] = combined_basin_areas_gdf.geometry.area

# Separate rows with and without node_id
non_null_basin_areas_gdf = combined_basin_areas_gdf[combined_basin_areas_gdf["node_id"].notna()]

# Find largest area node_ids for each code
largest_area_node_ids = non_null_basin_areas_gdf.loc[
non_null_basin_areas_gdf.groupby("code")["area"].idxmax(), ["code", "node_id"]
]

# Merge largest area node_ids back into the combined DataFrame
combined_basin_areas_gdf = combined_basin_areas_gdf.merge(
largest_area_node_ids, on="code", how="left", suffixes=("", "_largest")
)

# Fill missing node_id with the largest_area node_id
combined_basin_areas_gdf["node_id"] = combined_basin_areas_gdf["node_id"].fillna(
combined_basin_areas_gdf["node_id_largest"]
)
combined_basin_areas_gdf.drop(columns=["node_id_largest"], inplace=True)
combined_basin_areas_gdf = combined_basin_areas_gdf.drop_duplicates()
combined_basin_areas_gdf = combined_basin_areas_gdf.dissolve(by="node_id").reset_index()
combined_basin_areas_gdf = combined_basin_areas_gdf[["node_id", "geometry"]]
combined_basin_areas_gdf.index.name = "fid"

model.basin.area.df = combined_basin_areas_gdf

model.remove_unassigned_basin_area()

# %% write model
model.use_validation = True
model.write(ribasim_toml)
model.invalid_topology_at_node().to_file(ribasim_toml.with_name("invalid_topology_at_connector_nodes.gpkg"))
model.report_basin_area()
model.report_internal_basins()

# %%
62 changes: 51 additions & 11 deletions notebooks/noorderzijlvest/01b_fix_basin_area.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import pandas as pd
from networkx import all_shortest_paths, shortest_path
from shapely.geometry import MultiLineString
from shapely.ops import snap, split

from ribasim_nl import CloudStorage, Model, Network

Expand Down Expand Up @@ -32,9 +33,27 @@
cloud_storage.joinpath(
authority_name, "verwerkt", "5_D_HYDRO_export", "hydroobjecten", "Noorderzijlvest_hydroobjecten.shp"
),
use_fid_as_indes=True,
)

points = (
model.node_table().df[model.node_table().df.node_type.isin(["TabulatedRatingCurve", "Outlet", "Pump"])].geometry
)

for row in lines_gdf.itertuples():
line = row.geometry
snap_points = points[points.distance(line) < 0.1]
snap_points = snap_points[snap_points.distance(line.boundary) > 0.1]
if not snap_points.empty:
snap_point = snap_points.union_all()
line = snap(line, snap_point, 1e-8)
split_lines = split(line, snap_point)

lines_gdf.loc[row.Index, ["geometry"]] = split_lines

lines_gdf = lines_gdf.explode(index_parts=False, ignore_index=True)
lines_gdf.crs = 28992
network = Network(lines_gdf)
network = Network(lines_gdf.copy())
network.to_file(cloud_storage.joinpath(authority_name, "verwerkt", "network.gpkg"))

# %% add snap_point to he_df
Expand Down Expand Up @@ -64,6 +83,9 @@
he_outlet_df.set_index("HEIDENT", inplace=True)
he_df.set_index("HEIDENT", inplace=True)

# the coordinate is not always positioned correctly
he_outlet_df.loc["GPGKST0470", ["geometry"]] = model.manning_resistance[892].geometry

he_outlet_df.to_file(cloud_storage.joinpath(authority_name, "verwerkt", "HydrologischeEenheden_v45_outlets.gpkg"))

# %% Edit network
Expand All @@ -78,7 +100,16 @@
cloud_storage.download_file(model_edits_url)


for action in gpd.list_layers(model_edits_path).name.to_list():
for action in [
"merge_basins",
"remove_node",
"update_node",
"reverse_edge",
"connect_basins",
"move_node",
"add_basin",
"remove_edge",
]:
print(action)
# get method and args
method = getattr(model, action)
Expand Down Expand Up @@ -118,9 +149,7 @@ def find_basin_id(kwk_code):
def get_network_node(point):
node = network.move_node(point, max_distance=1, align_distance=10)
if node is None:
node = network.add_node(point, max_distance=1)
if node is None:
node = network.nodes.distance(point).idxmin()
node = network.add_node(point, max_distance=1, align_distance=10)
return node


Expand Down Expand Up @@ -184,7 +213,7 @@ def get_network_node(point):
)
model.edge.df.loc[mask, ["geometry"]] = edge

mask = he_df.node_id.isna() & (he_outlet_df.distance(MultiLineString(data)) < 1)
mask = he_df.node_id.isna() & (he_outlet_df.distance(MultiLineString(data)) < 0.75)
he_df.loc[mask, ["node_id"]] = node_id

# %% add last missings
Expand All @@ -201,23 +230,34 @@ def get_network_node(point):
he_df.loc[row.Index, ["node_id"]] = basin_node_id


# %% renew model Basin Area

# based on actions above we update the Basin / Area table:
# basins we found on KWKuit defined in the he-table

data = []
for node_id, df in he_df[he_df["node_id"].notna()].groupby("node_id"):
geometry = df.union_all()
streefpeil = df["OPVAFWZP"].min()

data += [{"node_id": node_id, "meta_streefpeil": streefpeil, "geometry": geometry}]

df = gpd.GeoDataFrame(data, crs=model.crs)
df.loc[:, "geometry"] = df.buffer(0.1).buffer(-0.1)
df.index.name = "fid"
model.basin.area.df = df

for action in ["remove_basin_area", "add_basin_area"]:
print(action)
# get method and args
method = getattr(model, action)
keywords = inspect.getfullargspec(method).args
df = gpd.read_file(model_edits_path, layer=action, fid_as_index=True)
for row in df.itertuples():
# filter kwargs by keywords
kwargs = {k: v for k, v in row._asdict().items() if k in keywords}
method(**kwargs)

model.remove_unassigned_basin_area()

# %%

model.write(ribasim_model_dir.with_stem(f"{authority_name}_fix_model_area") / f"{model_short_name}.toml")
model.report_basin_area()
model.report_internal_basins()
# %%
10 changes: 0 additions & 10 deletions notebooks/upload_feedback_formulieren.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,17 +8,7 @@
cloud = CloudStorage()

WATER_AUTHORITIES = [
"AaenMaas",
"BrabantseDelta",
"DeDommel",
"DrentsOverijsselseDelta",
"HunzeenAas",
"Limburg",
"Noorderzijlvest",
"RijnenIJssel",
"StichtseRijnlanden",
"ValleienVeluwe",
"Vechtstromen",
]

FEEDBACK_XLS = cloud.joinpath("Basisgegevens", "feedbackformulier", "Feedback Formulier.xlsx")
Expand Down
59 changes: 57 additions & 2 deletions src/ribasim_nl/ribasim_nl/geometry.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
# %%
"""Functions to apply on a shapely.geometry"""

from typing import get_type_hints

from shapely.geometry import LineString, MultiPolygon, Point, Polygon
from shapely.ops import polygonize, polylabel
from shapely.geometry import LineString, MultiLineString, MultiPolygon, Point, Polygon
from shapely.ops import polygonize, polylabel, snap, split

from ribasim_nl.generic import _validate_inputs

Expand Down Expand Up @@ -193,3 +194,57 @@ def edge(point_from: Point, point_to: Point) -> LineString:
if point_to.has_z:
point_to = Point(point_to.x, point_to.y)
return LineString((point_from, point_to))


def project_point(line: LineString, point: Point, tolerance: float = 1) -> Point:
    """Project a point onto a LineString.

    Parameters
    ----------
    line : LineString
        Line to project the point on.
    point : Point
        Point to project onto the line.
    tolerance : float, optional
        Maximum allowed distance between point and line. Defaults to 1.

    Returns
    -------
    Point
        The point on ``line`` nearest to ``point``.

    Raises
    ------
    ValueError
        If the point lies further than ``tolerance`` from the line.
    """
    # Refuse to project points that are too far away; a large distance usually
    # indicates a data error rather than a legitimate snap request.
    if point.distance(line) > tolerance:
        raise ValueError(f"point > {tolerance} from line ({point.distance(line)})")
    return line.interpolate(line.project(point))


def split_line(line: LineString, point: Point, tolerance: float = 0.1) -> MultiLineString | LineString:
    """Split a line on a point into a two-geometry MultiLineString.

    Parameters
    ----------
    line : LineString
        Input line.
    point : Point
        Point to split on.
    tolerance : float, optional
        Tolerance of point to line. Defaults to 0.1.

    Returns
    -------
    MultiLineString | LineString
        MultiLineString with 2 LineStrings when the line was split, otherwise
        the input line unchanged.
    """
    # if the point is within tolerance of line.boundary (an endpoint) we don't split
    if line.boundary.distance(point) < tolerance:
        return line

    # try if a direct split works (point lies exactly on a line vertex/segment)
    result = split(line, point)
    if len(result.geoms) == 2:
        # NOTE: shapely 2 multi-part geometries are not iterable, so the
        # collection's .geoms sequence must be passed to the constructor;
        # MultiLineString(result) raises a TypeError.
        return MultiLineString(result.geoms)

    # the point is probably not exactly on the line:
    # project the point onto the line, checking first that it is not too far off
    point = project_point(line, point, tolerance)

    # snap the line to the point so the line contains the point's coordinate
    line = snap(line, point, 1e-8)

    # now the split should succeed; if it still doesn't, return the line as-is
    result = split(line, point)
    if len(result.geoms) == 2:
        return MultiLineString(result.geoms)
    return line
Loading

0 comments on commit a737795

Please sign in to comment.