Skip to content

Commit

Permalink
Merge branch 'main' into nzv
Browse files Browse the repository at this point in the history
  • Loading branch information
visr authored Dec 20, 2024
2 parents dd3054d + 030524b commit e417544
Show file tree
Hide file tree
Showing 6 changed files with 125 additions and 32 deletions.
2 changes: 1 addition & 1 deletion notebooks/drents_overijsselse_delta/00_get_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@

ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml")
if ribasim_toml.exists():
ribasim_toml.rename(ribasim_toml.with_name(f"{short_name}.toml"))
ribasim_toml.replace(ribasim_toml.with_name(f"{short_name}.toml"))
29 changes: 16 additions & 13 deletions notebooks/drents_overijsselse_delta/01_fix_model_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,20 +25,22 @@
layer="duikersifonhevel",
)

split_line_gdf = gpd.read_file(
cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True
)

# Load node edit data
model_edits_url = cloud.joinurl(authority, "verwerkt", "model_edits.gpkg")
model_edits_path = cloud.joinpath(authority, "verwerkt", "model_edits.gpkg")
if not model_edits_path.exists():
cloud.download_file(model_edits_url)

# Load node edit data
fix_user_data_url = cloud.joinurl(authority, "verwerkt", "fix_user_data.gpkg")
fix_user_data_path = cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg")
if not fix_user_data_path.exists():
cloud.download_file(fix_user_data_url)

split_line_gdf = gpd.read_file(
cloud.joinpath(authority, "verwerkt", fix_user_data_path), layer="split_basins", fid_as_index=True
)

# level_boundary_gdf = gpd.read_file(
# cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="level_boundary", fid_as_index=True
# )

# %% read model
model = Model.read(ribasim_toml)
Expand Down Expand Up @@ -339,15 +341,16 @@
"remove_basin_area",
"split_basin",
"merge_basins",
"add_basin",
"update_node",
"add_basin_area",
"add_basin",
"update_basin_area",
"redirect_edge",
"reverse_edge",
"deactivate_node",
"move_node",
"remove_node",
"connect_basins",
]

actions = [i for i in actions if i in gpd.list_layers(model_edits_path).name.to_list()]
Expand All @@ -364,16 +367,16 @@
kwargs = {k: v for k, v in row._asdict().items() if k in keywords}
method(**kwargs)

# remove unassigned basin area
model.fix_unassigned_basin_area()
model.remove_unassigned_basin_area()

# %% Reset static tables

# Reset static tables
model = reset_static_tables(model)

# %% write model
model.use_validation = True
model.write(ribasim_toml)

model.invalid_topology_at_node().to_file(ribasim_toml.with_name("invalid_topology_at_connector_nodes.gpkg"))

model.report_basin_area()
model.report_internal_basins()
# %%
54 changes: 52 additions & 2 deletions notebooks/hunze_en_aas/01_fix_model_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@
if not model_edits_path.exists():
cloud.download_file(model_edits_url)

# Load area file to fill basin area holes
ribasim_areas_path = cloud.joinpath(authority, "verwerkt", "4_ribasim", "areas.gpkg")
ribasim_areas_gdf = gpd.read_file(ribasim_areas_path, fid_as_index=True, layer="areas")


# %% some stuff we'll need again
manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1])
Expand Down Expand Up @@ -90,18 +94,26 @@

# Reset static tables
model = reset_static_tables(model)
# fix unassigned basin area
model.fix_unassigned_basin_area()
model.explode_basin_area()
# fix unassigned basin area
model.fix_unassigned_basin_area()

# %%

actions = [
"remove_basin_area",
"remove_node",
"add_basin",
"update_node",
"add_basin_area",
"update_basin_area",
"merge_basins",
"reverse_edge",
"redirect_edge",
"merge_basins",
"move_node",
"connect_basins",
"update_node",
"deactivate_node",
]
actions = [i for i in actions if i in gpd.list_layers(model_edits_path).name.to_list()]
Expand All @@ -117,9 +129,47 @@
method(**kwargs)


# %% Assign Ribasim model ID's (dissolved areas) to the model basin areas (original areas with code) by overlapping the Ribasim area file based on largest overlap
# then assign Ribasim node-ID's to areas with the same area code. Many nodata areas disappear by this method
# Create the overlay of areas: union keeps every sliver from both inputs, explode splits multipolygons into single parts
combined_basin_areas_gdf = gpd.overlay(ribasim_areas_gdf, model.basin.area.df, how="union").explode()
# NOTE(review): this lambda is a no-op — both branches return x unchanged. Presumably the intent
# was to drop the Z dimension from 3D geometries; confirm and fix (e.g. via shapely force_2d).
combined_basin_areas_gdf["geometry"] = combined_basin_areas_gdf["geometry"].apply(lambda x: x if x.has_z else x)

# Calculate area for each geometry (planar area in the layer's CRS units)
combined_basin_areas_gdf["area"] = combined_basin_areas_gdf.geometry.area

# Separate rows with and without node_id (rows without node_id had no overlap with a model basin area)
non_null_basin_areas_gdf = combined_basin_areas_gdf[combined_basin_areas_gdf["node_id"].notna()]

# Find largest area node_ids for each code: one winning node_id per area code, chosen by max overlap
largest_area_node_ids = non_null_basin_areas_gdf.loc[
    non_null_basin_areas_gdf.groupby("code")["area"].idxmax(), ["code", "node_id"]
]

# Merge largest area node_ids back into the combined DataFrame
combined_basin_areas_gdf = combined_basin_areas_gdf.merge(
    largest_area_node_ids, on="code", how="left", suffixes=("", "_largest")
)

# Fill missing node_id with the largest_area node_id so nodata slivers inherit the dominant basin
combined_basin_areas_gdf["node_id"] = combined_basin_areas_gdf["node_id"].fillna(
    combined_basin_areas_gdf["node_id_largest"]
)
combined_basin_areas_gdf.drop(columns=["node_id_largest"], inplace=True)
combined_basin_areas_gdf = combined_basin_areas_gdf.drop_duplicates()
# Dissolve to one (multi)polygon per node_id; keep only the columns the basin area table expects
combined_basin_areas_gdf = combined_basin_areas_gdf.dissolve(by="node_id").reset_index()
combined_basin_areas_gdf = combined_basin_areas_gdf[["node_id", "geometry"]]
combined_basin_areas_gdf.index.name = "fid"

model.basin.area.df = combined_basin_areas_gdf

# Drop any area polygons that still have no basin node assigned
model.remove_unassigned_basin_area()

# %% write model
model.use_validation = True
model.write(ribasim_toml)
# Export connector-node topology problems for inspection next to the model
model.invalid_topology_at_node().to_file(ribasim_toml.with_name("invalid_topology_at_connector_nodes.gpkg"))
model.report_basin_area()
model.report_internal_basins()

# %%
10 changes: 0 additions & 10 deletions notebooks/upload_feedback_formulieren.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,17 +8,7 @@
cloud = CloudStorage()

WATER_AUTHORITIES = [
"AaenMaas",
"BrabantseDelta",
"DeDommel",
"DrentsOverijsselseDelta",
"HunzeenAas",
"Limburg",
"Noorderzijlvest",
"RijnenIJssel",
"StichtseRijnlanden",
"ValleienVeluwe",
"Vechtstromen",
]

FEEDBACK_XLS = cloud.joinpath("Basisgegevens", "feedbackformulier", "Feedback Formulier.xlsx")
Expand Down
15 changes: 9 additions & 6 deletions src/ribasim_nl/ribasim_nl/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -713,7 +713,7 @@ def remove_unassigned_basin_area(self):
if self.basin.area.df.node_id.duplicated().any():
df = df.dissolve(by="node_id").reset_index()
df.index.name = "fid"
self.basin.area.df = df
self.basin.area.df = df

def explode_basin_area(self, remove_z=True):
df = self.basin.area.df.explode().reset_index(drop=True)
Expand Down Expand Up @@ -785,17 +785,17 @@ def merge_basins(
if node_id in self.basin.area.df.node_id.to_numpy():
poly = self.basin.area.df.set_index("node_id").at[node_id, "geometry"]

if isinstance(poly, Polygon):
poly = MultiPolygon([poly])

# if to_node_id has area we union both areas
if to_node_id in self.basin.area.df.node_id.to_numpy():
if len(self.basin.area.df.loc[self.basin.area.df.node_id == to_node_id]) == 1:
poly = poly.union(self.basin.area.df.set_index("node_id").at[to_node_id, "geometry"])
if isinstance(poly, Polygon):
poly = MultiPolygon([poly])

self.basin.area.df.loc[self.basin.area.df.node_id == to_node_id, ["geometry"]] = poly

# else we add a record to basin
else:
if isinstance(poly, Polygon):
poly = MultiPolygon([poly])
self.basin.area.df.loc[self.basin.area.df.index.max() + 1] = {
"node_id": to_node_id,
"geometry": poly,
Expand Down Expand Up @@ -891,3 +891,6 @@ def invalid_topology_at_node(self, edge_type: str = "flow") -> gpd.GeoDataFrame:
return gpd.GeoDataFrame(
[], columns=["node_id", "node_type", "exception"], geometry=gpd.GeoSeries(crs=self.crs)
).set_index("node_id")


# %%
47 changes: 47 additions & 0 deletions src/ribasim_nl/ribasim_nl/run_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import os
import subprocess
from pathlib import Path

# TODO: add ribasim_exe so it can be used if ribasim is not part of env path
# TODO: check if ribasim is in path, stop if not and ribasim_exe is not provided
# TODO: raise FileNotFoundError if toml_path does not exist, so the user gets a clear error message


def run(
    toml_path: Path,
    stream_output: bool = True,
    returncode: bool = True,
):
    """Run a Ribasim model via the ``ribasim`` executable on the PATH.

    Args:
        toml_path (Path): path to your ribasim toml-file
        stream_output (bool, optional): stream output in IDE. Defaults to True.
        returncode (bool, optional): return the process return code after
            running the model; otherwise return captured stdout. Defaults to True.

    Returns:
        int | str | None: the process return code when ``returncode`` is True,
        otherwise the captured stdout (``None`` when output was streamed).

    Raises:
        FileNotFoundError: if ``toml_path`` does not exist.
    """
    # Fail early with a clear error instead of a confusing subprocess failure.
    if not toml_path.exists():
        raise FileNotFoundError(f"toml_path does not exist: {toml_path}")

    env = os.environ.copy()

    proc = subprocess.Popen(
        ["ribasim", toml_path.as_posix()],
        cwd=toml_path.parent.as_posix(),
        env=env,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        # utf-8 instead of ascii: non-ASCII solver output must not raise UnicodeDecodeError
        encoding="utf-8",
    )
    if stream_output:
        # Echo the solver's stdout line-by-line while it runs.
        with proc:
            proc.stdin.close()
            for line in proc.stdout:
                print(line, end="")
        outs = None
    else:
        # Capture all stdout at once; communicate() also waits for exit.
        outs, _ = proc.communicate("")

    if returncode:
        return proc.returncode
    return outs

0 comments on commit e417544

Please sign in to comment.