From e754acacfe28a365a78c0fffc56aa023a879b230 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Thu, 5 Sep 2024 15:47:19 +0200 Subject: [PATCH 01/23] Dommel basin profile (#142) With this branch we can create de dommel model version 2024.8.4 from 2024.6.3 All code used is updated to function with current ribasim-version (main) --------- Co-authored-by: Martijn Visser --- notebooks/de_dommel/01_fix_model_network.py | 82 ++-- notebooks/de_dommel/02_fix_edges.py | 9 +- notebooks/de_dommel/03_fix_basin_area.py | 9 +- notebooks/de_dommel/04_parameterize_model.py | 437 ++++++++++++++++++ notebooks/de_dommel/rare_edges.gpkg | Bin 0 -> 106496 bytes pixi.lock | 8 +- src/ribasim_nl/ribasim_nl/model.py | 87 +++- .../ribasim_nl/network_validator.py | 10 +- src/ribasim_nl/ribasim_nl/structure_node.py | 84 ++++ 9 files changed, 630 insertions(+), 96 deletions(-) create mode 100644 notebooks/de_dommel/04_parameterize_model.py create mode 100644 notebooks/de_dommel/rare_edges.gpkg create mode 100644 src/ribasim_nl/ribasim_nl/structure_node.py diff --git a/notebooks/de_dommel/01_fix_model_network.py b/notebooks/de_dommel/01_fix_model_network.py index d549f20..1c87870 100644 --- a/notebooks/de_dommel/01_fix_model_network.py +++ b/notebooks/de_dommel/01_fix_model_network.py @@ -1,11 +1,10 @@ # %% -import sqlite3 import geopandas as gpd -import pandas as pd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet from ribasim_nl import CloudStorage, Model, NetworkValidator +from shapely.geometry import Point cloud = CloudStorage() @@ -24,26 +23,6 @@ basin.State(level=[0]), ] -# %% remove urban_runoff -# Connect to the SQLite database - -conn = sqlite3.connect(database_gpkg) - -# get table into DataFrame -table = "Basin / static" -df = pd.read_sql_query(f"SELECT * FROM '{table}'", conn) - -# drop urban runoff column if exists -if "urban_runoff" in df.columns: - df.drop(columns="urban_runoff", inplace=True) - - # Write the DataFrame back 
to the SQLite table - df.to_sql(table, conn, if_exists="replace", index=False) - -# # Close the connection -conn.close() - - # %% read model model = Model.read(ribasim_toml) @@ -52,10 +31,7 @@ # %% verwijder duplicated edges # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2288780504 -model.edge.df = model.edge.df[~((model.edge.df.from_node_type == "Basin") & (model.edge.df.to_node_type == "Basin"))] - # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291081244 -duplicated_fids = network_validator.edge_duplicated().index.to_list() model.edge.df = model.edge.df.drop_duplicates(subset=["from_node_id", "to_node_id"]) if not network_validator.edge_duplicated().empty: @@ -64,20 +40,16 @@ # %% toevoegen bovenstroomse knopen # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291091067 -edge_mask = model.edge.df.index.isin(duplicated_fids) node_id = model.next_node_id -edge_fid = next(i for i in duplicated_fids if i in model.edge.df.index) -model.edge.df.loc[edge_mask, ["from_node_type"]] = "Basin" -model.edge.df.loc[edge_mask, ["from_node_id"]] = node_id +edge_id = model.edge.df.loc[model.edge.df.to_node_id == 251].index[0] +model.edge.df.loc[edge_id, ["from_node_id"]] = node_id -node = Node(node_id, model.edge.df.at[edge_fid, "geometry"].boundary.geoms[0]) +node = Node(node_id, model.edge.df.at[edge_id, "geometry"].boundary.geoms[0]) model.basin.area.df.loc[model.basin.area.df.node_id == 1009, ["node_id"]] = node_id area = basin.Area(geometry=model.basin.area[node_id].geometry.to_list()) model.basin.add(node, basin_data + [area]) # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291111647 - - for row in network_validator.edge_incorrect_connectivity().itertuples(): # drop edge from model model.remove_edge(row.from_node_id, row.to_node_id, remove_disconnected_nodes=False) @@ -91,28 +63,31 @@ if row.to_node_id == 2: geometry = row.geometry.interpolate(0.99, normalized=True) name = "" + 
meta_object_type = "openwater" if row.to_node_id == 14: gdf = gpd.read_file( - cloud.joinpath("DeDommel", "verwerkt", "1_ontvangen_data", "Geodata", "data_Q42018.gpkg"), - layer="DuikerSifonHevel", + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg"), + layer="duikersifonhevel", engine="pyogrio", fid_as_index=True, ) - kdu = gdf.loc[5818] + kdu = gdf.loc[250] geometry = kdu.geometry.interpolate(0.5, normalized=True) - name = kdu.objectId + geometry = Point(geometry.x, geometry.y) + name = kdu.CODE + meta_object_type = "duikersifonhevel" # add manning-node - manning_node_id = model.next_node_id - manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) - model.manning_resistance.add( - Node(node_id=manning_node_id, geometry=geometry, name=name), - [manning_data], + outlet_node_id = model.next_node_id + outlet_data = outlet.Static(flow_rate=[100]) + model.outlet.add( + Node(node_id=outlet_node_id, geometry=geometry, name=name, meta_object_type=meta_object_type), + [outlet_data], ) # add edges - model.edge.add(model.basin[row.from_node_id], model.manning_resistance[manning_node_id]) - model.edge.add(model.manning_resistance[manning_node_id], model.level_boundary[row.to_node_id]) + model.edge.add(model.basin[row.from_node_id], model.outlet[outlet_node_id]) + model.edge.add(model.outlet[outlet_node_id], model.level_boundary[row.to_node_id]) if not network_validator.edge_incorrect_connectivity().empty: @@ -122,11 +97,11 @@ # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291271525 for row in network_validator.node_internal_basin().itertuples(): - if row.node_id not in model.basin.area.df.node_id.to_numpy(): # remove or change to level-boundary - edge_select_df = model.edge.df[model.edge.df.to_node_id == row.node_id] + if row.Index not in model.basin.area.df.node_id.to_numpy(): # remove or change to level-boundary + edge_select_df = model.edge.df[model.edge.df.to_node_id == 
row.Index] if len(edge_select_df) == 1: - if edge_select_df.iloc[0]["from_node_type"] == "FlowBoundary": - model.remove_node(row.node_id) + if model.node_table().df.at[edge_select_df.iloc[0]["from_node_id"], "node_type"] == "FlowBoundary": + model.remove_node(row.Index) model.remove_node(edge_select_df.iloc[0]["from_node_id"]) model.edge.df.drop(index=edge_select_df.index[0], inplace=True) @@ -166,7 +141,7 @@ geometry = gdf.loc[2751].geometry.interpolate(0.5, normalized=True) node_id = model.next_node_id -data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) model.manning_resistance.add(Node(node_id=node_id, geometry=geometry), [manning_data]) @@ -192,6 +167,7 @@ # # see: https://github.com/Deltares/Ribasim-NL/issues/132 model.basin.area.df.loc[model.basin.area.df.duplicated("node_id"), ["node_id"]] = -1 model.basin.area.df.reset_index(drop=True, inplace=True) +model.basin.area.df.index.name = "fid" model.fix_unassigned_basin_area() model.fix_unassigned_basin_area(method="closest", distance=100) model.fix_unassigned_basin_area() @@ -199,10 +175,8 @@ model.basin.area.df = model.basin.area.df[~model.basin.area.df.node_id.isin(model.unassigned_basin_area.node_id)] # # %% write model -ribasim_toml = ribasim_toml.parents[1].joinpath("DeDommel", ribasim_toml.name) -model.write(ribasim_toml) +model.edge.df.reset_index(drop=True, inplace=True) +model.edge.df.index.name = "edge_id" +ribasim_toml = ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_model_network", "model.toml") -# %% upload model - -# cloud.upload_model("DeDommel", "DeDommel") -# %% +model.write(ribasim_toml) diff --git a/notebooks/de_dommel/02_fix_edges.py b/notebooks/de_dommel/02_fix_edges.py index fc37cf7..3667b14 100644 --- a/notebooks/de_dommel/02_fix_edges.py +++ b/notebooks/de_dommel/02_fix_edges.py @@ -6,7 +6,7 @@ # %% load 
model -ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel", "model.toml") +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_model_network", "model.toml") model = Model.read(ribasim_toml) # %% network from HydroObjects @@ -15,13 +15,13 @@ network = Network.from_network_gpkg(network_gpkg) else: network = Network.from_lines_gpkg( - cloud.joinpath("DeDommel", "verwerkt", "2_voorbewerking", "hydamo.gpkg"), layer="hydroobject" + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject" ) network.to_file(network_gpkg) # %% edges follow HydroObjects model.reset_edge_geometry() -node_df = model.node_table().df.set_index("node_id") +node_df = model.node_table().df data = [] for row in model.edge.df.itertuples(): try: @@ -58,4 +58,5 @@ gpd.GeoDataFrame(data, crs=28992).to_file("rare_edges.gpkg") # %% write model -# model.write(ribasim_toml) +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_edges", "model.toml") +model.write(ribasim_toml) diff --git a/notebooks/de_dommel/03_fix_basin_area.py b/notebooks/de_dommel/03_fix_basin_area.py index 504362f..7e8dd0c 100644 --- a/notebooks/de_dommel/03_fix_basin_area.py +++ b/notebooks/de_dommel/03_fix_basin_area.py @@ -8,7 +8,7 @@ # %% load model -ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel", "model.toml") +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_edges", "model.toml") model = Model.read(ribasim_toml) @@ -23,7 +23,7 @@ dissolved_area_gdf.to_file(cloud.joinpath("DeDommel", "verwerkt", "water_area.gpkg")) # %% -basin_df = model.basin.node.df.set_index("node_id") +basin_df = model.basin.node.df basin_area_df = gpd.read_file( cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg"), engine="pyogrio", fid_as_index=True ) @@ -51,7 +51,7 @@ area_df = selected_areas[selected_areas.contains(row.geometry)] if not area_df.empty: name = area_df.iloc[0].NAAM - model.basin.node.df.loc[model.basin.node[row.Index].index[0], 
["name"]] = name + model.basin.node.df.loc[row.Index, ["name"]] = name # assign name to edges if defined model.edge.df.loc[edges_mask, ["name"]] = name @@ -75,7 +75,8 @@ area_df = gpd.GeoDataFrame(data, crs=model.basin.node.df.crs) area_df = area_df[~area_df.is_empty] +area_df.index.name = "fid" model.basin.area.df = area_df # %% - +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_areas", "model.toml") model.write(ribasim_toml) diff --git a/notebooks/de_dommel/04_parameterize_model.py b/notebooks/de_dommel/04_parameterize_model.py new file mode 100644 index 0000000..82675ae --- /dev/null +++ b/notebooks/de_dommel/04_parameterize_model.py @@ -0,0 +1,437 @@ +# %% +import geopandas as gpd +import pandas as pd +from ribasim.nodes import manning_resistance, pump +from ribasim_nl import CloudStorage, Model +from ribasim_nl.structure_node import get_outlet, get_tabulated_rating_curve +from shapely.geometry import MultiLineString + +PROFIEL_ID_COLUMN = "PROFIELLIJNID" +PROFIEL_LINE_ID_COLUMN = "profiel_id" +PROFIEL_HOOGTE_COLUMN = "HOOGTE" +PROFIEL_BREEDTE_COLUMN = "breedte" +STUW_TARGET_LEVEL_COLUMN = "WS_STREEFPEILLAAG" +STUW_CREST_LEVEL_COLUMN = "LAAGSTEDOORSTROOMHOOGTE" +STUW_CODE_COLUMN = "WS_DOMMELID" +STUW_WIDTH_COLUMN = "KRUINBREEDTE" +STUW_NAME_COLUMN = "NAAM" + +KDU_INVERT_LEVEL_US_COLUMN = "WS_BODEMHOOGTEBOV" +KDU_INVERT_LEVEL_DS_COLUMN = "WS_BODEMHOOGTEBEN" +KDU_WIDTH_COLUMN = "BREEDTEOPENING" +KDU_HEIGHT_COLUMN = "HOOGTEOPENING" +KDU_SHAPE_COLUMN = "VORMKOKER2" +KDU_SHAPE_MAP = { + "rond": "round", + "rechthoekig": "rectangle", + "eivorming": "ellipse", + "heulprofiel": "ellipse", + "muilprofiel": "ellipse", + "ellipsvormig": "ellipse", +} + +KGM_CAPACITY_COLUMN = "MAXIMALECAPACITEIT" +KGM_NAME_COLUMN = "NAAM" +KGM_CODE_COLUMN = "WS_DOMMELID" + + +cloud = CloudStorage() + + +# %% Voorbereiden profielen uit HyDAMO +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_areas", "model.toml") +model = Model.read(ribasim_toml) 
+model.tabulated_rating_curve.static.df = None +model.manning_resistance.static.df = None +model.outlet.static.df = None + + +profile_gpkg = cloud.joinpath("DeDommel", "verwerkt", "profile.gpkg") +hydamo_gpkg = cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg") +stuw_df = gpd.read_file(hydamo_gpkg, layer="stuw", engine="pyogrio") +stuw_df.loc[stuw_df.CODE.isna(), ["CODE"]] = stuw_df[stuw_df.CODE.isna()].NAAM +stuw_df.loc[stuw_df.CODE.isna(), ["CODE"]] = stuw_df[stuw_df.CODE.isna()].WS_DOMMELID +stuw_df.set_index("CODE", inplace=True) + +kdu_df = gpd.read_file(hydamo_gpkg, layer="duikersifonhevel", engine="pyogrio").set_index("CODE") + +kgm_df = gpd.read_file(hydamo_gpkg, layer="gemaal", engine="pyogrio").set_index("CODE") + +basin_area_df = gpd.read_file(cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg"), engine="pyogrio").set_index( + "node_id" +) + +if not profile_gpkg.exists(): + profielpunt_gdf = gpd.read_file( + hydamo_gpkg, + layer="profielpunt", + engine="pyogrio", + fid_as_index=True, + ) + + profiellijn_gdf = gpd.read_file( + hydamo_gpkg, + layer="profiellijn", + engine="pyogrio", + fid_as_index=True, + ).set_index("GLOBALID") + + hydroobject_gdf = gpd.read_file( + hydamo_gpkg, + layer="hydroobject", + engine="pyogrio", + fid_as_index=True, + ) + + area_df = gpd.read_file( + cloud.joinpath("DeDommel", "verwerkt", "watervlakken", "LWW_2023_A_water_vlak_V.shp"), + engine="pyogrio", + fid_as_index=True, + ) + + data = [] + for profiel_id, df in profielpunt_gdf.groupby(PROFIEL_ID_COLUMN): + if not df.empty: + if profiel_id in profiellijn_gdf.index: + lowest_point = df.at[df[PROFIEL_HOOGTE_COLUMN].idxmin(), "geometry"] + containing_area_df = area_df[area_df.contains(lowest_point)] + if (not containing_area_df.empty) | (len(containing_area_df) > 1): + area_poly = containing_area_df.iloc[0].geometry + print(profiel_id) + profiel_geom = profiellijn_gdf.at[profiel_id, "geometry"] + breedte = profiellijn_gdf.at[profiel_id, 
PROFIEL_BREEDTE_COLUMN] + profiel_geom = profiel_geom.intersection(area_poly) + if isinstance(profiel_geom, MultiLineString): + geoms = [i for i in profiel_geom.geoms if hydroobject_gdf.intersects(i).any()] + else: + geoms = [profiel_geom] + + bodemhoogte = df[PROFIEL_HOOGTE_COLUMN].min() + insteekhoogte = df[PROFIEL_HOOGTE_COLUMN].max() + waterlijnhoogte = df[df.within(area_poly)][PROFIEL_HOOGTE_COLUMN].max() + + data += [ + { + "profiel_id": profiel_id, + "bodemhoogte": bodemhoogte, + "insteekhoogte": insteekhoogte, + "waterlijnhoogte": waterlijnhoogte, + "breedte": breedte, + "geometry": geom, + } + for geom in geoms + ] + + profile_df = gpd.GeoDataFrame(data, crs=profielpunt_gdf.crs) + profile_df = profile_df[~profile_df.is_empty] + profile_df.drop_duplicates("profiel_id", inplace=True) + profile_df.to_file(profile_gpkg, engine="pyogrio") +else: + profile_df = gpd.read_file(profile_gpkg, engine="pyogrio", fid_as_index=True) + profile_df.drop_duplicates("profiel_id", inplace=True) + + +# %% Basin / Profile +# of all profiles within basin/area we take the one with the lowest level +def get_area_and_profile(node_id): + area_geometry = None + + # try to get a sensible area_geometry from basin-area + if node_id in model.basin.area.df.node_id.to_list(): + area_geometry = model.basin.area[node_id].set_index("node_id").at[node_id, "geometry"] + if area_geometry.area > 1000: + selected_profiles_df = profile_df[profile_df.intersects(area_geometry)] + else: + area_geometry = None + + # if we didn't get an area (of sufficient size) we get it from profiles and edges + if area_geometry is None: + edges_select_df = model.edge.df[(model.edge.df.from_node_id == node_id) | (model.edge.df.to_node_id == node_id)] + selected_profiles_df = profile_df[profile_df.intersects(edges_select_df.union_all())] + if selected_profiles_df.empty: + width = 2 + else: + width = selected_profiles_df.length.mean() + area_geometry = edges_select_df.buffer(width / 2).union_all() + + # select profile + 
if not selected_profiles_df.empty: # we select the profile with the lowest level + profile = selected_profiles_df.loc[selected_profiles_df["bodemhoogte"].idxmin()] + + else: # we select closest profile + print(f"basin without intersecting profile {row.Index}") + profile = profile_df.loc[profile_df.distance(row.geometry).idxmin()] + + return area_geometry, profile + + +# %% update basin / profile +basin_profile_df = model.basin.profile.df[model.basin.profile.df.node_id == -999] +for row in model.basin.node.df.itertuples(): + area_geometry, profile = get_area_and_profile(row.Index) + + level = [profile.bodemhoogte, profile.insteekhoogte] + area = [1, round(max(area_geometry.area, 999))] + + # remove profile from basin + model.basin.profile.df = model.basin.profile.df[model.basin.profile.df.node_id != row.Index] + + # add profile to basin + basin_profile_df = pd.concat( + [ + basin_profile_df, + pd.DataFrame({"node_id": [row.Index] * len(level), "level": level, "area": area}), + ] + ) + + model.basin.node.df.loc[row.Index, ["meta_profile_id"]] = profile[PROFIEL_LINE_ID_COLUMN] + + +basin_profile_df.reset_index(inplace=True, drop=True) +basin_profile_df.index.name = "fid" +model.basin.profile.df = basin_profile_df + +# set basin state +state_df = model.basin.profile.df.groupby("node_id").max()["level"].reset_index() +state_df.index.name = "fid" +model.basin.state.df = state_df + +# %% +# Stuwen als tabulated_rating_cuves +for row in model.node_table().df[model.node_table().df.meta_object_type == "stuw"].itertuples(): + node_id = row.Index + + # get weir + if row.name in stuw_df.index: + kst = stuw_df.loc[row.name] + elif stuw_df.distance(row.geometry).min() < 1: + kst = stuw_df.loc[stuw_df.distance(row.geometry).idxmin()] + else: + raise ValueError(f"Geen stuw gevonden voor node_id {node_id}") + + if isinstance(kst, gpd.GeoDataFrame): + kst = kst.iloc[0] + name = kst[STUW_NAME_COLUMN] + code = kst[STUW_CODE_COLUMN] + if pd.isna(name): + name = code + + # get upstream, 
if at flowboundary downstream profile + basin_node_id = model.upstream_node_id(node_id) + if not model.node_table().df.at[basin_node_id, "node_type"] == "Basin": + basin_node_id = model.downstream_node_id(node_id) + + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + + # get level + + # from target-level + crest_level = kst[STUW_TARGET_LEVEL_COLUMN] + + # if NA crest-level + if pd.isna(crest_level): + crest_level = kst[STUW_CREST_LEVEL_COLUMN] + + # if NA upstream min basin-level + 10cm + if pd.isna(crest_level): + crest_level = profile.waterlijnhoogte + if pd.isna(crest_level): + crest_level = profile[["bodemhoogte", "waterlijnhoogte"]].mean() + + # if crest_level < upstream bottom-level we lower it to upstream bottom-level + if crest_level < profile.bodemhoogte: + print(f"crest-level lower than upstream basin {node_id}") + crest_level = profile.bodemhoogte + 0.1 + + # get width + crest_width = kst[STUW_WIDTH_COLUMN] + + # if NA or implausible we overwrite with 0.5 of profile width + if pd.isna(crest_width) | (crest_width > profile.geometry.length): # cannot be > profile width + crest_width = 0.5 * profile.geometry.length # assumption is 1/2 profile_width + + # get data + data = [get_tabulated_rating_curve(crest_level=crest_level, width=crest_width)] + + model.update_node( + node_id, + node_type="TabulatedRatingCurve", + data=data, + node_properties={"name": name, "meta_code_waterbeheerder": code}, + ) + +# %% Duikers als tabulated_rating_cuves +for row in model.node_table().df[model.node_table().df.meta_object_type == "duikersifonhevel"].itertuples(): + node_id = row.Index + + # get culvert + if row.name in kdu_df.index: + kdu = kdu_df.loc[row.name] + elif kdu_df.distance(row.geometry).min() < 1: + kdu = kdu_df.loc[kdu_df.distance(row.geometry).idxmin()] + else: + raise ValueError(f"Geen stuw gevonden voor node_id {node_id}") + + # get upstream, if at flowboundary downstream profile + basin_node_id = 
model.upstream_node_id(node_id) + if not model.node_table().df.at[basin_node_id, "node_type"] == "Basin": + basin_node_id = model.downstream_node_id(node_id) + + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + + # get level + + # from invert-levels + crest_level = kdu[[KDU_INVERT_LEVEL_US_COLUMN, KDU_INVERT_LEVEL_DS_COLUMN]].dropna().max() + + # if NA upstream min basin-level + 10cm + if pd.isna(crest_level): + crest_level = profile.waterlijnhoogte + if pd.isna(crest_level): + crest_level = profile[["bodemhoogte", "waterlijnhoogte"]].mean() + + # if crest_level < upstream bottom-level we lower it to upstream bottom-level + if crest_level < profile.bodemhoogte: + print(f"crest-level lower than upstream basin {node_id}") + crest_level = profile.bodemhoogte + 0.1 + + # get width + width = kdu[KDU_WIDTH_COLUMN] + + # if NA or implausible we overwrite with 0.5 of profile width + if pd.isna(width) | (width > profile.geometry.length): # cannot be > profile width + width = profile.geometry.length / 3 # assumption is 1/3 profile_width + + # get height + height = kdu[KDU_HEIGHT_COLUMN] + + if pd.isna(height): + height = width + + # get shape + shape = kdu[KDU_SHAPE_COLUMN] + + if pd.isna(shape): + shape = "rectangle" + else: + shape = KDU_SHAPE_MAP[shape] + + # update model + data = get_tabulated_rating_curve( + crest_level=crest_level, + width=width, + height=height, + shape=shape, + levels=[0, 0.1, 0.5], + ) + + model.update_node( + node_id, + node_type="TabulatedRatingCurve", + data=[data], + node_properties={"meta_code_waterbeheerder": row.name}, + ) + +# %% gemalen als pump +for row in model.node_table().df[model.node_table().df.meta_object_type == "gemaal"].itertuples(): + node_id = row.Index + + # get upstream profile + basin_node_id = model.upstream_node_id(node_id) + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + min_upstream_level = 
profile.waterlijnhoogte + + kgm = kgm_df.loc[row.name] + + # set name and code column + name = kgm[KGM_NAME_COLUMN] + code = kgm[KGM_CODE_COLUMN] + + if pd.isna(name): + name = code + + # get flow_rate + flow_rate = kgm[KGM_CAPACITY_COLUMN] + + if pd.isna(flow_rate): + flow_rate = round( + basin_area_df.at[model.upstream_node_id(node_id), "geometry"].area * 0.015 / 86400, 2 + ) # 15mm/day of upstream areay + + data = pump.Static(flow_rate=[flow_rate], min_upstream_level=min_upstream_level) + + model.update_node( + node_id, + node_type="Pump", + data=[data], + node_properties={"name": name, "meta_code_waterbeheerder": code}, + ) + +# %% update open water +for row in model.node_table().df[model.node_table().df.node_type == "ManningResistance"].itertuples(): + node_id = row.Index + + # get depth + basin_node_id = model.upstream_node_id(node_id) + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + depth = profile.insteekhoogte - profile.bodemhoogte + + # compute profile_width from slope + profile_slope = 0.5 + profile_width = profile.geometry.length - ((depth / profile_slope) * 2) + + # if width < 1/3 * profile.geometry.length (width at invert), we compute profile_slope from profile_width + if profile_width < profile.geometry.length / 3: + profile_width = profile.geometry.length / 3 + profile_slope = depth / profile_width + + # get length + length = round( + model.edge.df[(model.edge.df.from_node_id == node_id) | (model.edge.df.to_node_id == node_id)].length.sum() + ) + + # # update node + data = manning_resistance.Static( + length=[round(length)], + profile_width=[round(profile_width, 2)], + profile_slope=[round(profile_slope, 2)], + manning_n=[0.04], + ) + model.update_node(node_id, node_type="ManningResistance", data=[data]) + + +# %% update outlets +for row in model.node_table().df[model.node_table().df.node_type == "Outlet"].itertuples(): + node_id = row.Index + + # get upstream, if at flowboundary 
downstream profile + basin_node_id = model.upstream_node_id(node_id) + if not model.node_table().df.at[basin_node_id, "node_type"] == "Basin": + basin_node_id = model.downstream_node_id(node_id) + + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + + height = round(profile.insteekhoogte - profile.bodemhoogte, 2) + width = round(profile.geometry.length, 2) + + try: + crest_level = round(model.upstream_profile(node_id).level.min() + 0.1, 2) + except ValueError: + crest_level = round(model.downstream_profile(node_id).level.min() + 0.1, 2) + + data = get_outlet(crest_level=crest_level, width=width, height=height, max_velocity=0.5) + + model.update_node(node_id, node_type="Outlet", data=[data]) + +# %% clean boundaries +model.flow_boundary.static.df = model.flow_boundary.static.df[ + model.flow_boundary.static.df.node_id.isin( + model.node_table().df[model.node_table().df.node_type == "FlowBoundary"].index + ) +] +model.flow_boundary.static.df.loc[:, "flow_rate"] = 0 +# %% write model +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_parameterized", "model.toml") +model.write(ribasim_toml) + +# %% diff --git a/notebooks/de_dommel/rare_edges.gpkg b/notebooks/de_dommel/rare_edges.gpkg new file mode 100644 index 0000000000000000000000000000000000000000..141490d65b79acdb953d6866ff644372633e3683 GIT binary patch literal 106496 zcmeI53w#^Zndmio`aOBeiSsyCFp?TM@gt7ykPs&J#3QifM3xgw8iZnv?IC(N(iq#} zu{chkk1h8WE+x=i3VUG}?iSkZ50a2e0^#!9?UR;Ww%shHyDhXpFNM7!5GeP|XOp>?7%%@0{|!^mq)FI0vXZDcofhglR1^|0Xx>JFLnc@dUCEYrrJ zaQ8Nxp(7YVJ!~i27C~Vq*u(mmjosl$0C|;qp>zs~v7HGXc};cjdeJpPIuR53Eb33E zvSKDCq{OvIZKd2i)E`Tw(jwZ)qXeJLB5^2|LiJuh+QzZCroIrnZDpSPI#ln!2(h8| zy_eFisx25}^;8)5q|?c0g1?4OL`R0>xEe-Ce-fgp1Px*D=5~pHdI05Q&Jn0m$Fl*d zt3n~1b@@1T((6#23UghB0@E=Am}lvcIlZb+YcM}$f6P^^pf}C{-(*{v%UxSb-P9`g z1*4xC-WiO$e4kjoq@Q@OUpy6Fn zQB0!E;b0dN?n9TceR>7~T^I8QblC=Qwq!D@qa}@Q!H$rz1V0M1oA7WFYGZp8+;}(= 
z`IMN|q4xVXlsnx$wUk9j#rd)9?gXAmqp=Y&Ex(T%%@nOS##Br$EpfSderMS{*-9eJjt=^zq7X|&GyFg_JDx>8RnXs2>U3{!nz4m3d9#%zdz9_7 zxLoe$X6oi;xehYTY(zj?!5gcUDQ6)XJ>h`}EZg;Q3x+AXg->&yL{S{dzCy4F$LK;)y=j zi}U1hAD84)`G(gC&!}q~y{SKX?!FW(cDi>oPwPvXQB;qJHWc)Ul3>x97p06oMh(Ri z3mh)@s#Vm)_MBs;2Q%1O%77^m)7`sjT3mX)YQNJ*xh5A|U2dBbJVP60^+~wnq%(0mRTZ4R zDYbA=5Y6=Z9UsP1q3{S4z{}^ zKhYDw`?cOKy}T#Tvl$;ubO+nFdp6?p3O<4ATUOV50&5zZn;To3SJpQ*1sYmg@pD5< zeM3ut>5XvR;b5e1yGN>nC*WzWt6S~av7RVe^H#Qc?+FIYxP`D1OwK{ZPiqCw4 z-J$KC?wtu?_Xr>D5%~;{cjU3*A)()c>&b+fE;ho3w|np*k06f3`Dl6|n#QMELdp|p zYQPlM)UT;)k{@gGGW6qpbOv9d!=@7ALQE20+1S#&rm;nRwC0It`(yZcY#`PzrZZ9- zTk)f=IbUWVmdNr^9&fjW)SxGTEm2ojmsg4+k;0BKB#W$G(gH!$?YGLhwCECd}`UQoR+p&2A{=Y z+C<66TH)FYloOgp$>h9y2Z~qV?zT^uShB672equ0su$-)pf00e*l5C8%|00;m9An<7rh*720ORCG) zyYR!BiQ)TY`1l~7Rd1fj=3*M^8dlfVwbs@(Md};Z)-|kcY+Z??u3#)DS}wfvX_{Hk zQ@QZHy6-afJf00cfa1j=pI#kng7u>b$u7#11=1b_e#00KY&2mk>f00e*l5C8%|Kt~{d|F7Kt z|F(sGTPFf9fdCKy0zd!=00AHX1b_e#00KY&2mpcenm`3*tEO_t|9&|CKd;RRl?MVq z00;m9AOHk_01yBIKmZ5;0U$61fkNm1CoJ@dDa0Wf2mk>f00e*l5C8%|00;m9AOHk_ z01)_W6R4(K)$^;ZMMb#-f9dx>JA)w>|B`Yrgn!{gIsbplLcjIdZVJ>62mk>f00e*l z5C8%|00;m9AOHk_01%iVfjRc-`MU4^ldc88{r@v$2Vy_~2mk>f00e*l5C8%|00;m9 zAOHl;D+1>G|1Go5s|G-2fdCKy0zd!=00AHX1b_e#00KY&2mpaIBp{vt!~Xvap@BFM z00KY&2mk>f00e*l5C8%|00;nq^NfJ>`+upP-|I4I{5Hy-*omw2|G^9cEGQ=5{G_ z_a>w&lr^F5khvT$!Z{Gjv~eihy$u)85e%Uowv%m(pfD5cVSUWT?ri9@jzs`vWQHjc$L z_l4MPEAuM74%PcFLTspgZ~H6->+Bg}6;fF~BO0-t8CInX4Qnve!-ga0h*2r?Q^a^F z?W)>>y|a2MjC<1QWHiBF!za$xH%%pI2vyHa51^dPIRZUXX%kSz0uE>Kjtci&Jy2-a;Lkema+(`I6s!%oe)Gm8XFPQ^82Wm*PsNwWcJYZ%w{M^=%Gh zGufyRHybA@q@rUcxKix6IW{&{NEXMMW%E%EiplIJ#?oLWxoq4#X_C(wNNKOxIN9Nt zD8v%c3_lRfj%Sfi74&zMI^ElwX53FPd2?Tt>`}JQ;&Qp0o2i?Z<@&4i{mZeyN#;`lIMNMI`8*n=X$P@mL^v|g7qN9in$`%Z=dN{dzCy4F$LK;`LIl6Xwa|E-J~V@(r&Oo>9*~!yFp4OK%qo^JcZ7ApyCBdRIFG`t1j2en37C2n)Rja6p?K#Ix4`#5nl<`Wg zfIh-$c1V4)&MwmoJOg}89Lexl%Sb9WIkdBznBGoh=J7L5r@MF6w7B$o)qbasa!oF_ zy4+qbwSP3%w!C(cH#T#dNRr;srfCq~$;6^kSIU)8BEhcQ42OuI4{tICL?OxhyfuBbHObnVc%+6~ 
zThq0+X5b2Mz?({sdj0-@ek2bd{RkdN>IQC{q#Lje^MbiEWTfsO%^YVlx@v=KtKI4D z@=j}EVgQS#h7>CZWbVIGiu-wqbPxMR0 zv)%3$D8snn!k)aGF4p#b!37H2 zsu{68%iW(|_h)SXr>`11N0xo_XXK~Ke&^i$G_Q;){Z!pqeOi~y6t-$5&Wx>^N$02Y zX5^#AK<9o0qq z$<$SNt)X05tW-TD9ib)N7{CIq`BXdE7K!g`46G)&i0j_ z0U_=|>ydN@puA>6?aCCaX2nyobUhA9@zV6%6Nv`1Y;sr}mnUjP%I{Z?G03jxNsR;HJ63mNKuNrSd zt}3>=XkU-&=>;pK`eu&Q%;+oKg1{#T3L*EU_|cU)u=178^PKEN=~9=QVW^4fX;$LW z%D6CbDW#HNmah>P_LZn4IPDr-+6en6=p`<9TN^b|ldBCJ4#MH!)MuewG z!4Qj2C4!-j0P+N-;~)>&+u{i6DuPGe!{avt8GIF0>VSCDuNM;2g<_@J%U7hd{MnJ6 zDP9~+XLiX2NY|bG4!p6a=+n~neja_gVr%&uhmMoh)`3KNbX|zKSh&Ef>MO6j_|EJxig|U(>7S!X>s*qXL!5#Di7cH_f6Gwt&r;wFtedPI*wd*RG*oXhT zXg_||T15m&f-!^rXO7d)p(?v+qCp zXi(QGB1i;C1|n$xf&CR~Qd70mtm^$|lB?fEcIKvMFFRD#WM_{4+q@k=9(6NEntpJ) z^~*X{MgfwM2-=@@lu)xZRZGmOd`H_RznHKyqsP9r=$W*gdH$PE-(%n4X8!mg6?nEo zr^+ZmG7>?%=vZcXPE)nmtcuEf<(9-+8}nk|hYt;}w=sJUeC^4le_X&EdueN;o6@N= z3XqIM&>nYAP&aC-s?4h1UG`%7!UC#}hQD!d?{un2Mj~k6iB;X8saj-Kb@Pt~UMeo2 z>h!G-TBmIl$w&n4F|6u(P1QoPs*ykY=q`5wRn3bwEelWADw2^1+OKj>SPpBdD$S~{ z-_UgBsn;#cA9frGKJZ%$v$5#gi@$cin>qgX_dfH&gF0Io1xQ9BXur}v*K$x(afAef_PE{{eU9fBxZn9Um@WEPd|LKey^yWfUM8iJAh^nFa zswyfsUzOeV;NIs8sA@m{zQt6%nTRc2NH?aROM{$M&) zBqI^Dy=`CKM>Gku&5e3-Ys$CrE*nFbTd=sWPU1)9Y)RYZ^okPO6fwi@exS3*-on^k?Z^Y>TGd6#0y zv}av$^VU086qpx@AQ2!L$UJHN$hy{XT2obFRz((ABqv#DkrHyPQkQN-fMg(o)(@?% zI#uOnRUeFtd*9n@W4`{@bx#cMw=wNsO}tkAr3K7C+~|AzR{X4~G76B4M9}J?*16JJ ztIEu(c4J#D?Xok*CCPp~I=y^_&ex3sBqI^D&bEHjF;`PnYF0(o2mj$e_WL*PvuO*8 z*M6{mmXvIY(|~R%BKs%|Z5N8)IB2Jof$#zkANE zYn4%eWYh%bQa9RjYZ%wms=n+`{r3o-iXE$GuaRbXtpu_SF$$Pe&7uBGr^-3CD&vyG zq{=8@QZ<{pL8rAn~f>Gz<0wj{Ji0+UvGBcg~jvh$18W@XUzkNAQ2!L zh@jSspj70D{>qGW8HBFRR z)4N|{uIgGuF=R@;?vB*0$MA$mrdF*{M1Ulaj3mR=mP0Nt)`Yjrb8DpU|D)))E%*-} zKmZ5;0U!VbfB+Bx0zd!=00AHX1c1Qjgg^yltEO`IH29_ce+@pgr@ui5DvrbXKb-&P zZV-U;f8`Doa;#6T!jt2Day4GN@_+A7!|Ces} zBggp0I|<0KzBbqz1xQ9B2hI!@7BGyu(D?Xv;V-p@88*{`v4CSAPFQR$>6%r+Hb#` z+$b;j%&LAlt7Z7(H>}Lxe*f6~qif00e*l5C8%| z00;nqPnf_}l(Tw%?e6N_4g1m;$;v-VpnQ*~{CiPDqBy*E)vD3a(UmxPIM%-_Hps8U 
zKNqknJIwdjH`J|K&X3_AAIjq26o|_CMl<*ar)txw#CQp=F-8B21^>YV2mk>f00e*l z5C8%|00;m9AOHk_01!A22`qQuiwDKiUptV_|NV5Mg+5K6r2jy_Nxw$_iatTVO#hVr zA^igVH2nm9gnpF%F8#OkU(@%}e?i|t-%5Xt{t`V&-$0MkBAuqMrg{1*dON+9-a>cM z9rQ+eJ>7zfga;4+0zd!=00AHX1b_e#00KY&2mpcenSjl2vlQEWQs9+>3#Fh&3NDa> z3KmL1r4+cOV1X3Omx6gxFjorZNWp9=m?Z_Y z6jVq-xfGO1L8%m!NI|g_6iIz>&Ud(_P8)Z4 z-cIiDJxkSi8@Ko`Z^8fn!5u&4=Z>GAt;X9q*NP3CtK)xhM?)d*XzEop-oY(A?BN#v zxSKm%GR7UgkWu5EO8ifnaQs8Mc$X6Y;$Iu&iG`XLvcHPB29{eHqc$8A(OSvVM+qos)7 z*9dob(eJp!n|7)3bFr=dPcB}w*F0SI-Q4mYe}v0^ggf}mQB{6EwqZNB^eT!w^s9@x z!}DEg`~nKeX)p$3qONLt#d=%Fu!98W$sK!@f+djp)y4tv-fkABJ&1(EYY}?1V zg>T%<9eNx4P|4TT_(eFrOBcUKjj!Ss{-jYC|F9arm|OVMxGw&BHGT=U?Z0!&Z}4-E zovOh$Ca1DXv271<)ernD_dkzKU>pBfm0!j!KJ@q4#&>X!y*U@#n3RvOO-^vLT~Bex zUrp)kQjKjsjD6??_MJJ{_G{I0Je=pxpXNM&bs=|jD8n7yRi(x+=bXPi&N)x7;*PDX z;f{5_uf|`%l|Fe3SNejBd$zuZdv@(=HNJ+U%MN2-Im|uz9)o>_^rH*8#ndmbZyn_x zX?sQIS6-!my*Nklhf^!ma(tXS`T^(Wf6IOEg{9p0zW*&X-p{#x*K_WxOS$hIcml^0 z4r=n}|4G{aTV{R!+X*cM0zd!=00AHX1b_e#00KY&2mk>f@c%ObY5z}Khb;8N6^~Sm zl)qlSwe0z_l_ftb?k#F|-t36mZ?cWzWOx99b0V;}%LVk+I z`2;WW(E&atj%4^~e|jV(MsuH8$-Qa|vrL3Vk#Ml1gAF6k3=8leW>W;qx3irr&fXnD zo?Mw8w2|!yhEVV3cByRni#5`N8C;7L{=N(`i+b1y%Bvb$Z+Kse>d`ii4YMeb9_2GW zF}5?oM^mvR@5i!AoHwV@>%R!Gq4vFH7Z%1Lq_UXJIpFvc1rQv;P!Ai9eDXL}oE46V z?KQ3{wz_EVcGarNx0~d!MWydOQGF#BYjla}a*>8l;Ro1+A(T$US9&B(9u(@1pipmT zr|KYTu1(?YF6=*IhUaxYDp$^N9PgnvJwib!DTU%k9&ug{s%15W7vOBVt1B$S1#5+8*qYTizW;J&|ZI%O;1#ab3FK ze>PBXFY{^o13B$Vf0(RYE+g81{R|K_o*kmbPY@Ay51EmKPI8q`5LBM`kZu}AVRmyT z)5fA;DAGNp3vOjPd)XevCwF#-6^G`VUf4XnynM@kpud7zPxoQS<>d%f&{r?L(XXe(QS?An_x`NU@)_Za?8(=5n(vHBm1wa|%v`(cxGI zubrk#TSz8uW@9Hi#bBluviJ zVQfqah19tGdfdOh%IV&+g0jfpUYFgSz+Xlejg5$D`F&Jgg==3}myOmNA{I=HE^@h7 zte_?px`t%K+xdAE4T(s%Tt^Z1@<^aRoft``g-)sAeX pd.DataFrame: def node_properties_to_table(table, node_properties, node_id): - if isinstance(node_id, int): - node_id = [node_id] # update DataFrame table_node_df = getattr(table, "node").df for column, value in node_properties.items(): - 
table_node_df.loc[table_node_df.node_id.isin(node_id), [column]] = value + table_node_df.loc[node_id, [column]] = value class BasinResults(BaseModel): @@ -50,7 +48,7 @@ def basin_results(self): @property def next_node_id(self): - return self.node_table().df.node_id.max() + 1 + return self.node_table().df.index.max() + 1 def find_node_id(self, ds_node_id=None, us_node_id=None, **kwargs) -> int: """Find a node_id by it's properties""" @@ -87,15 +85,43 @@ def find_node_id(self, ds_node_id=None, us_node_id=None, **kwargs) -> int: @property def unassigned_basin_area(self): """Get unassigned basin area""" - return self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.node_id)] + return self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.index)] @property def basin_node_without_area(self): """Get basin node without area""" - return self.basin.node.df[~self.basin.node.df.node_id.isin(self.basin.area.df.node_id)] + return self.basin.node.df[~self.basin.node.df.index.isin(self.basin.area.df.node_id)] + + def upstream_node_id(self, node_id: int): + """Get upstream node_id(s)""" + return self.edge.df.set_index("to_node_id").loc[node_id].from_node_id + + def upstream_profile(self, node_id: int): + """Get upstream basin-profile""" + upstream_node_id = self.upstream_node_id(node_id) + + node_type = self.node_table().df.loc[upstream_node_id].node_type + if node_type != "Basin": + raise ValueError(f"Upstream node_type is not a Basin, but {node_type}") + else: + return self.basin.profile[upstream_node_id] + + def downstream_node_id(self, node_id: int): + """Get downstream node_id(s)""" + return self.edge.df.set_index("from_node_id").loc[node_id].to_node_id + + def downstream_profile(self, node_id: int): + """Get upstream basin-profile""" + downstream_node_id = self.downstream_node_id(node_id) + + node_type = self.node_table().df.loc[downstream_node_id].node_type + if node_type != "Basin": + raise ValueError(f"Upstream node_type is not a Basin, but 
{node_type}") + else: + return self.basin.profile[downstream_node_id] def get_node_type(self, node_id: int): - return self.node_table().df.set_index("node_id").at[node_id, "node_type"] + return self.node_table().df.at[node_id, "node_type"] def get_node(self, node_id: int): """Return model-node by node_id""" @@ -113,7 +139,10 @@ def remove_node(self, node_id: int, remove_edges: bool = False): for attr in table.model_fields.keys(): df = getattr(table, attr).df if df is not None: - getattr(table, attr).df = df[df.node_id != node_id] + if node_id in df.columns: + getattr(table, attr).df = df[df.node_id != node_id] + else: + getattr(table, attr).df = df[df.index != node_id] if remove_edges and (self.edge.df is not None): for row in self.edge.df[self.edge.df.from_node_id == node_id].itertuples(): @@ -126,22 +155,28 @@ def remove_node(self, node_id: int, remove_edges: bool = False): ) def update_node(self, node_id, node_type, data, node_properties: dict = {}): - """Update a node type and/or data""" - # get existing network node_type - existing_node_type = self.node_table().df.set_index("node_id").at[node_id, "node_type"] + existing_node_type = self.node_table().df.at[node_id, "node_type"] # read existing table table = getattr(self, pascal_to_snake_case(existing_node_type)) # save node, so we can add it later - node_dict = table.node.df[table.node.df["node_id"] == node_id].iloc[0].to_dict() + node_dict = table.node.df.loc[node_id].to_dict() node_dict.pop("node_type") + node_dict["node_id"] = node_id # remove node from all tables for attr in table.model_fields.keys(): df = getattr(table, attr).df if df is not None: - getattr(table, attr).df = df[df.node_id != node_id] + if "node_id" in df.columns: + getattr(table, attr).df = df[df.node_id != node_id] + else: + getattr(table, attr).df = df[df.index != node_id] + + # remove from used node-ids so we can add it again in the same table + if node_id in table._parent._used_node_ids: + 
table._parent._used_node_ids.node_ids.remove(node_id) # add to table table = getattr(self, pascal_to_snake_case(node_type)) @@ -155,10 +190,6 @@ def update_node(self, node_id, node_type, data, node_properties: dict = {}): node_properties = {**node_properties, **node_dict} node_properties_to_table(table, node_properties, node_id) - # change type in edge table - self.edge.df.loc[self.edge.df["from_node_id"] == node_id, ["from_node_type"]] = node_type - self.edge.df.loc[self.edge.df["to_node_id"] == node_id, ["to_node_type"]] = node_type - def add_control_node( self, to_node_id: int | list, @@ -226,10 +257,6 @@ def reverse_edge(self, from_node_id: int, to_node_id: int): self.edge.df.loc[edge_id, ["from_node_id"]] = edge_data["to_node_id"] self.edge.df.loc[edge_id, ["to_node_id"]] = edge_data["from_node_id"] - # revert node types - self.edge.df.loc[edge_id, ["from_node_type"]] = edge_data["to_node_type"] - self.edge.df.loc[edge_id, ["to_node_type"]] = edge_data["from_node_type"] - # revert geometry self.edge.df.loc[edge_id, ["geometry"]] = edge_data["geometry"].reverse() @@ -282,7 +309,7 @@ def fix_unassigned_basin_area(self, method: str = "within", distance: float = 10 """ if self.basin.node.df is not None: if self.basin.area.df is not None: - basin_area_df = self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.node_id)] + basin_area_df = self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.index)] for row in basin_area_df.itertuples(): if method == "within": @@ -299,18 +326,18 @@ def fix_unassigned_basin_area(self, method: str = "within", distance: float = 10 ValueError(f"Supported methods are 'within' or 'closest', got '{method}'.") # check if basin_nodes within area are not yet assigned an area - basin_df = basin_df[~basin_df.node_id.isin(self.basin.area.df.node_id)] + basin_df = basin_df[~basin_df.index.isin(self.basin.area.df.node_id)] # if we have one node left we are done if len(basin_df) == 1: - 
self.basin.area.df.loc[row.Index, ["node_id"]] = basin_df.iloc[0].node_id + self.basin.area.df.loc[row.Index, ["node_id"]] = basin_df.index[0] else: raise ValueError("Assign Basin Area to your model first") else: raise ValueError("Assign a Basin Node to your model first") def reset_edge_geometry(self, edge_ids: list | None = None): - node_df = self.node_table().df.set_index("node_id") + node_df = self.node_table().df if edge_ids is not None: df = self.edge.df[self.edge.df.index.isin(edge_ids)] else: @@ -319,3 +346,13 @@ def reset_edge_geometry(self, edge_ids: list | None = None): for row in df.itertuples(): geometry = LineString([node_df.at[row.from_node_id, "geometry"], node_df.at[row.to_node_id, "geometry"]]) self.edge.df.loc[row.Index, ["geometry"]] = geometry + + @property + def edge_from_node_type(self): + node_df = self.node_table().df + return self.edge.df.from_node_id.apply(lambda x: node_df.at[x, "node_type"] if x in node_df.index else None) + + @property + def edge_to_node_type(self): + node_df = self.node_table().df + return self.edge.df.to_node_id.apply(lambda x: node_df.at[x, "node_type"] if x in node_df.index else None) diff --git a/src/ribasim_nl/ribasim_nl/network_validator.py b/src/ribasim_nl/ribasim_nl/network_validator.py index ba95378..72472c5 100644 --- a/src/ribasim_nl/ribasim_nl/network_validator.py +++ b/src/ribasim_nl/ribasim_nl/network_validator.py @@ -26,7 +26,7 @@ def check_node_connectivity(row, node_df, tolerance=1.0) -> bool: def check_internal_basin(row, edge_df) -> bool: if row.node_type == "Basin": - return row.node_id not in edge_df.from_node_id.to_numpy() + return row.name not in edge_df.from_node_id.to_numpy() else: return False @@ -81,7 +81,7 @@ def edge_missing_nodes(self): def edge_incorrect_from_node(self): """Check if the `from_node_type` in edge-table in matches the `node_type` of the corresponding node in the node-table""" - node_df = self.node_df.set_index("node_id") + node_df = self.node_df mask = ~self.edge_df.apply( 
lambda row: node_df.at[row["from_node_id"], "node_type"] == row["from_node_type"] if row["from_node_id"] in node_df.index @@ -92,7 +92,7 @@ def edge_incorrect_from_node(self): def edge_incorrect_to_node(self): """Check if the `to_node_type` in edge-table in matches the `node_type` of the corresponding node in the node-table""" - node_df = self.node_df.set_index("node_id") + node_df = self.node_df mask = ~self.edge_df.apply( lambda row: node_df.at[row["to_node_id"], "node_type"] == row["to_node_type"] if row["to_node_id"] in node_df.index @@ -103,7 +103,7 @@ def edge_incorrect_to_node(self): def edge_incorrect_connectivity(self): """Check if the geometries of the `from_node_id` and `to_node_id` are on the start and end vertices of the edge-geometry within tolerance (default=1m)""" - node_df = self.node_df.set_index("node_id") + node_df = self.node_df mask = self.edge_df.apply( lambda row: check_node_connectivity(row=row, node_df=node_df, tolerance=self.tolerance), axis=1 ) @@ -112,5 +112,5 @@ def edge_incorrect_connectivity(self): def edge_incorrect_type_connectivity(self, from_node_type="ManningResistance", to_node_type="LevelBoundary"): """Check edges that contain wrong connectivity""" - mask = (self.edge_df.from_node_type == from_node_type) & (self.edge_df.to_node_type == to_node_type) + mask = (self.model.edge_from_node_type == from_node_type) & (self.model.edge_to_node_type == to_node_type) return self.edge_df[mask] diff --git a/src/ribasim_nl/ribasim_nl/structure_node.py b/src/ribasim_nl/ribasim_nl/structure_node.py new file mode 100644 index 0000000..0e628ef --- /dev/null +++ b/src/ribasim_nl/ribasim_nl/structure_node.py @@ -0,0 +1,84 @@ +from math import pi + +from ribasim.nodes import outlet, tabulated_rating_curve + + +def calculate_area(width: float, shape: str, height: float | None = None): + """Calculate flow-area of a cross-section""" + # shapes that only use width + if shape == "round": + return pi * (width / 2) ** 2 + + # shapes that need height + 
elif height is None: + raise ValueError(f"for shape {shape} height cannot be None") + elif shape in ["rectangle"]: + return width * height + elif shape in ["ellipse"]: + return pi * (width / 2) * (height / 2) + + # shapes not implemented + else: + raise ValueError(f"shape {shape} not implemented") + + +def calculate_velocity( + level: float, + crest_level, +): + """Calculate velocity over a weir-type structure.""" + if crest_level > level: + return 0 + else: + return ((2 / 3) * 9.81 * (level - crest_level)) ** (1 / 2) + + +def calculate_flow_rate( + level: float, + crest_level: float, + width: float, + height: float | None = None, + loss_coefficient: float = 0.63, + shape: str = "rectangle", +): + velocity = calculate_velocity(level=level, crest_level=crest_level) + area = width * ((2 / 3) * (level - crest_level)) + if height is not None: + area = min(area, calculate_area(width=width, shape=shape, height=height)) + + return round(loss_coefficient * area * velocity, 2) + + +def get_outlet( + crest_level: float, width: float, shape: str = "rectangle", height: float | None = None, max_velocity: float = 1 +) -> outlet.Static: + """Return an outlet curve from structure-data""" + area = calculate_area(width=width, shape=shape, height=height) + flow_rate = round(area * max_velocity, 2) + + return outlet.Static(flow_rate=[flow_rate], min_upstream_level=crest_level) + + +def get_tabulated_rating_curve( + crest_level, + width, + loss_coefficient: float = 0.63, + height: float | None = None, + shape: str = "rectangle", + levels: list[float] = [0, 0.05, 0.1, 0.25, 0.5], +) -> tabulated_rating_curve.Static: + """Return a tabulated-rating curve from structure-data""" + level = [round(crest_level, 2) + i for i in levels] + flow_rate = [ + calculate_flow_rate( + level=i, + crest_level=crest_level, + width=width, + height=height, + shape=shape, + loss_coefficient=loss_coefficient, + ) + for i in level + ] + + return tabulated_rating_curve.Static(level=level, flow_rate=flow_rate) 
From 58d1e37ca00d4319e5db6ada26de8dd9cce316e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 16:25:00 +0200 Subject: [PATCH 02/23] Bump peter-evans/create-pull-request from 6 to 7 (#143) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [peter-evans/create-pull-request](https://github.com/peter-evans/create-pull-request) from 6 to 7.
Release notes

Sourced from peter-evans/create-pull-request's releases.

Create Pull Request v7.0.0

:sparkles: Now supports commit signing with bot-generated tokens! See "What's new" below. :writing_hand::robot:

Behaviour changes

  • Action input git-token has been renamed branch-token, to be more clear about its purpose. The branch-token is the token that the action will use to create and update the branch.
  • The action now handles requests that have been rate-limited by GitHub. Requests hitting a primary rate limit will retry twice, for a total of three attempts. Requests hitting a secondary rate limit will not be retried.
  • The pull-request-operation output now returns none when no operation was executed.
  • Removed deprecated output environment variable PULL_REQUEST_NUMBER. Please use the pull-request-number action output instead.

What's new

  • The action can now sign commits as github-actions[bot] when using GITHUB_TOKEN, or your own bot when using GitHub App tokens. See commit signing for details.
  • Action input draft now accepts a new value always-true. This will set the pull request to draft status when the pull request is updated, as well as on creation.
  • A new action input maintainer-can-modify indicates whether maintainers can modify the pull request. The default is true, which retains the existing behaviour of the action.
  • A new output pull-request-commits-verified returns true or false, indicating whether GitHub considers the signature of the branch's commits to be verified.

What's Changed

New Contributors

Full Changelog: https://github.com/peter-evans/create-pull-request/compare/v6.1.0...v7.0.0

Create Pull Request v6.1.0

✨ Adds pull-request-branch as an action output.

What's Changed

... (truncated)

Commits
  • 8867c4a fix: handle ambiguous argument failure on diff stat (#3312)
  • 6073f54 build(deps-dev): bump @​typescript-eslint/eslint-plugin (#3291)
  • 6d01b56 build(deps-dev): bump eslint-plugin-import from 2.29.1 to 2.30.0 (#3290)
  • 25cf845 build(deps-dev): bump @​typescript-eslint/parser from 7.17.0 to 7.18.0 (#3289)
  • d87b980 build(deps-dev): bump @​types/node from 18.19.46 to 18.19.48 (#3288)
  • 119d131 build(deps): bump peter-evans/create-pull-request from 6 to 7 (#3283)
  • 73e6230 docs: update readme
  • c0348e8 ci: add v7 to workflow
  • 4320041 feat: signed commits (v7) (#3057)
  • 0c2a66f build(deps-dev): bump ts-jest from 29.2.4 to 29.2.5 (#3256)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=peter-evans/create-pull-request&package-manager=github_actions&previous-version=6&new-version=7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/pre-commit_auto_update.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pre-commit_auto_update.yml b/.github/workflows/pre-commit_auto_update.yml index 49ace1c..5d8f614 100644 --- a/.github/workflows/pre-commit_auto_update.yml +++ b/.github/workflows/pre-commit_auto_update.yml @@ -18,7 +18,7 @@ jobs: with: python-version: "3.11" - uses: browniebroke/pre-commit-autoupdate-action@main - - uses: peter-evans/create-pull-request@v6 + - uses: peter-evans/create-pull-request@v7 with: token: ${{ secrets.GITHUB_TOKEN }} branch: update/pre-commit-hooks From a21bba6499936615e007fb3ea8f49663c4aff0bb Mon Sep 17 00:00:00 2001 From: Maarten Pronk Date: Sun, 15 Sep 2024 14:39:31 +0200 Subject: [PATCH 03/23] Update pixi.lock to fix quartodoc (#144) --- .github/workflows/docs.yml | 3 +- .github/workflows/python_lint.yml | 1 + .github/workflows/python_tests.yml | 1 + pixi.lock | 8835 +++++++++++++++------------- 4 files changed, 4610 insertions(+), 4230 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 6588e81..780b804 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -29,8 +29,9 @@ jobs: - uses: prefix-dev/setup-pixi@v0.8.1 with: - manifest-path: Ribasim/pixi.toml + manifest-path: Ribasim-NL/pixi.toml pixi-version: "latest" + frozen: true - name: Check Quarto installation and all engines working-directory: Ribasim-NL diff --git a/.github/workflows/python_lint.yml b/.github/workflows/python_lint.yml index 4fbe4cd..54016f5 100644 --- a/.github/workflows/python_lint.yml +++ b/.github/workflows/python_lint.yml @@ -31,6 +31,7 @@ jobs: with: manifest-path: Ribasim-NL/pixi.toml pixi-version: "latest" + frozen: true - name: Run mypy on hydamo working-directory: Ribasim-NL diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml 
index 708bd8a..d1a0b23 100644 --- a/.github/workflows/python_tests.yml +++ b/.github/workflows/python_tests.yml @@ -37,6 +37,7 @@ jobs: with: manifest-path: Ribasim-NL/pixi.toml pixi-version: "latest" + frozen: true - name: Run tests working-directory: Ribasim-NL diff --git a/pixi.lock b/pixi.lock index 632fd8b..36233b7 100644 --- a/pixi.lock +++ b/pixi.lock @@ -15,25 +15,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h98912ed_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.25-h15d0e8c_6.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.3-h8dac057_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.27-h4bc722e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.18-h038f3f9_10.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.2-h570d160_21.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.7-ha1f794c_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h0040ed1_5.conda 
- - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-hc14a930_17.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.4-h558cea2_8.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h038f3f9_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h038f3f9_10.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.27.5-hd0b8a3b_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h7dc8893_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.29-h03582ad_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.8-h5e77a74_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-hc2627b9_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h01636a3_19.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-h191b246_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h29c84ef_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda @@ -44,20 +44,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h30efb56_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.0-ha66036c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - 
conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.0-py312h1671c18_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda @@ -67,33 +67,33 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.1-py312h8572e83_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h41a817b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py312h98912ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dart-sass-1.58.3-ha770c72_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.11-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312hca68cad_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312h2ec8cdc_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-1.41.0-hfc7925d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-dom-0.1.35-hd9586b0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.23.1-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.2-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.6-py312h96884de_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.0-py312h5aa26c2_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 @@ -103,12 +103,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h41a817b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h7eda2e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda @@ -120,7 +120,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda @@ -128,25 +128,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.2.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.2.0-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda @@ -156,12 +156,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hee9dde6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hf8d3e68_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py312h8572e83_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda @@ -169,52 +169,58 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8756180_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-he02047a_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-he02047a_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hc9a23c6_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-23_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-23_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.0-hbbe4b11_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.122-h4ab18f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hba09cee_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-hdd6600c_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-h5f34788_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-ha39a594_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-ha2ed5f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h2ebfdf0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h2b45729_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-h94e7027_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h0fa2cb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-he047751_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-he047751_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h9d8aadb_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h062f1c4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hbd1db40_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_2.conda + - 
conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h151b34b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h151b34b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hbbc8833_1020.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-23_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm18-18.1.8-h8b73ec9_2.conda @@ -222,38 +228,39 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-haa1307c_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h482b261_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-hc0a3c3a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-hb90f79a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h46a8edc_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h9c5d478_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312h03f37cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h98912ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312h854627b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -261,60 +268,60 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.8-py312h2492b07_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.1-py312h41a817b_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.3.0-h70512c7_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.3.0-ha479ceb_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h70512c7_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-ha479ceb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.103-h593d115_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.1-py312h1103770_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.2-py312h58c1407_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h98912ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.2-py312h1d6d2e6_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240807-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.3-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2.1-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h287a98d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h56024de_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-hb0d391f_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-ha8faf9a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h9a8786e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda @@ -322,15 +329,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.20.1-py312hf008fa9_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.3-py312h12e396e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h8ad7a51_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h01329cd_8.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312hb5137db_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h5aa26c2_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312h91f0f75_3.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda @@ -339,14 +346,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h41a817b_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.1.0-py312h7ab5c7e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.55-ha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.10-py312hff7f44f_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.57-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda @@ -355,17 +362,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.7.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312hf008fa9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.1-py312hbe4c86d_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.0-h3400bea_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.1-py312h775a589_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.0-py312h499d17b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.4-py312hd18ad41_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.2-h7b32b05_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-72.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.5-py312h8413631_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h41a817b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda @@ -374,54 +381,56 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sphobjinv-2.3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.0-h6d4b2fc_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.1-h9eae976_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.25.0-h213c483_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h9a8786e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240712-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/typst-0.11.0-he8a937b_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024a-h3f72095_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h8572e83_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-4.0.1-py312h7900ff3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.0-h5291e77_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.2-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h98912ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.4-h4ab18f5_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h666cd97_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.42-h4ab18f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda @@ -429,19 +438,24 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-h4bc722e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-h4bc722e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-recordproto-1.14.2-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h75354e8_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312h3483029_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl - pypi: src/bokeh_helpers @@ -455,25 +469,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312h104f124_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312hb553811_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.25-h4880c77_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.3-hf37c103_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.27-hfdf4475_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.18-hf37c103_10.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.2-h324d61a_21.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.7-h3c4ec21_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hdc1c6f6_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-h93740dc_17.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.4-hd06a241_8.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-hf37c103_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-hf37c103_10.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.27.5-hfadd0b3_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-hae762b9_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.29-h2dfa2de_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hf6f7cdd_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.8-h2f86973_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-he4b61a0_19.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.5-h915d0f8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-h8128ea2_11.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.2-h27d4fa7_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-h7a58a96_9.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.13.0-hf8dbe3c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.8.0-h60298e3_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.12.0-h646f05d_0.conda @@ -484,20 +498,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/blosc-1.21.6-h7d75f6d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312heafc425_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312h5861a67_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.0-h51dda26_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cairo-1.18.0-h37bd5c4_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.0-py312h9620c06_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.1-py312hf857d28_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-ha105788_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda @@ -507,31 +521,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.2.1-py312h9230928_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hbd25219_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-0.12.3-py312h41838bb_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/dart-sass-1.58.3-h694c41f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.11-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h28f332c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h5861a67_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-1.41.0-h86af993_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-dom-0.1.35-h08cba0f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.23.1-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.2-h73e2aa4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.9.6-py312hfc836c0_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.3-hac325c4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.0-py312hcd3578f_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/fmt-11.0.2-h3c5361c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 @@ -541,12 +555,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/fontconfig-2.14.2-h5bb23bf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hb553811_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/freetype-2.12.1-h60636b9_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/freexl-2.0.0-h3ec172f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h29648be_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda @@ -557,32 +571,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hb1e8313_1004.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/giflib-5.2.2-h10d778d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/glog-0.7.1-h2790a97_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf4-4.2.15-h8138101_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf5-1.14.3-nompi_h687a608_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/icu-75.1-h120a0e1_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.2.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.2.0-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh57ce528_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.17-h6253ea5_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda @@ -592,49 +606,49 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/kealib-1.5.3-hb2b617a_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/kiwisolver-1.4.5-py312h49ebfd2_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/kealib-1.5.3-he475af8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/kiwisolver-1.4.7-py312hc5c4d5f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/krb5-1.21.3-h37d8d59_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/lcms2-2.16-ha2f27b4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lerc-4.0.0-hb486fe8_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240116.2-cxx17_hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libaec-1.1.3-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.4-h20e244c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h2952479_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-ha60c65e_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libblas-3.9.0-22_osx64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h0dc2134_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcblas-3.9.0-22_osx64_openblas.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libcrc32c-1.1.2-he49afe7_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.9.1-hfcf2730_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-heced48a_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.0-h58e7537_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.21-hfdf4475_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libevent-2.1.12-ha90c15b_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.3-hac325c4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h4b9bb65_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h5d197d2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h385febf_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-h86719f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-h513f0eb_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hc5f35ca_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-h3b8d0bf_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-h3127c03_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-ha7d2355_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h1b48671_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h1b48671_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-ha63beff_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-h597966e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h26ecb72_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7ffd8cf_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7ffd8cf_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-13_2_0_h97931a8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-13.2.0-h2873a65_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.80.3-h736d271_2.conda @@ -644,39 +658,39 @@ environments: - 
conda: https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.17-hd75f5a5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.22.5-hdfe23c8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libjpeg-turbo-3.0.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libkml-1.3.0-hfcbc525_1020.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libkml-1.3.0-h9ee1731_1021.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/liblapack-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libllvm14-14.0.6-hc8e404f_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libnetcdf-4.9.2-nompi_h7334405_114.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libopenblas-0.3.27-openmp_h8869122_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.43-h92b6c6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h4501773_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h75a757a_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-4.25.3-h4e4d658_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.09.01-h81f5012_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-he2ba7a0_16.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.18-hbcb3906_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.20-hfdf4475_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hdc25a2c_9.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.0-h1b8f9f3_0.conda 
+ - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.1-h4b8f8c9_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.0-hd019ec5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h87f9345_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h75589b3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.6.0-h603087a_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libutf8proc-2.8.0-hb7f2c08_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libwebp-base-1.4.0-h10d778d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h0dc2134_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h00291cd_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libxml2-2.12.7-heaf3512_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.10.1-hc158999_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-18.1.8-h15ab845_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hdeb90da_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hcc8fd36_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h904eaf1_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h83408cd_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-c-1.9.4-hf0c8a7f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lzo-2.10-h10d778d_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312h41838bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h0d5aeb7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -684,58 +698,58 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/minizip-4.0.7-h62b0c8d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.0.8-py312hc3c9ca0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.1.0-py312hc5c4d5f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.2-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h5846eda_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.103-he7eb89d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.104-h3135457_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py312hc3b515d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.1-py312h8813227_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.2-py312he4d506f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openjpeg-2.5.2-h7310d3a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h8847cbe_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-h87427d6_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h732d5f6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.2-hd23fc13_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h22b2039_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.2-py312h1171441_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240807-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.3-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2.1-h694c41f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pcre2-10.44-h7634a1b_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312hbd70edc_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312h683ea77_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.43.4-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h744cbf2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h65860a0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h9b73963_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h4b98a8f_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.4.1-hf92c781_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda @@ -743,16 +757,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-core-17.0.0-py312h63b501a_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.8.2-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.20.1-py312ha47ea1c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.3-py312h669792a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312he77c50b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312he77c50b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312h43b3a95_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312ha320102_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312hab44e94_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312hab44e94_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312hcd3578f_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312haf32e09_9.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda @@ -761,13 +775,13 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hbd25219_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.1.0-py312h7a17523_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/qhull-2020.2-h3c5361c_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.55-h694c41f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.10-py312h1c98354_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.57-h694c41f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.11-py312h4104ae2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.09.01-hb168e87_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda @@ -776,16 +790,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.7.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312ha47ea1c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.1-py312h8b25c6c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.1-py312hc214ba5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.0-py312hb9702fa_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312h669792a_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.4-py312he6c0bb9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.2-py312h9d777eb_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-72.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.5-py312h594820c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h8047845_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/snappy-1.2.1-he1e6707_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda @@ -794,57 +808,57 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/spdlog-1.14.1-h325aa07_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sphobjinv-2.3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/sqlite-3.46.0-h28673e1_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/sqlite-3.46.1-he26b093_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.25.0-h6b8956e_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.0-h313d0e2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240712-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/typst-0.11.0-h11a7dfb_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024a-h10d778d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312h49ebfd2_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024b-h00291cd_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312hc5c4d5f_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/uriparser-0.9.8-h6aefe2f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.3-pyhd8ed1ab_0.conda - - 
conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-4.0.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.2-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312h41838bb_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-hfb503d4_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h0dc2134_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hde137ed_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hb33e954_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-h87427d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h331e495_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h7122b0e_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.6-h915ae27_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl - pypi: src/bokeh_helpers @@ -857,25 +871,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312he70551f_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.25-h7db803d_6.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.3-ha1e9ad3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.27-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.18-ha1e9ad3_10.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.2-hf2a634e_21.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.7-h6bd9195_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h8d4122e_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h827f298_17.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.4-h21392f2_8.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-ha1e9ad3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-ha1e9ad3_10.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.27.5-h1307057_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h21b9f41_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.29-hf1f9119_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hb6a8f00_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.8-heca9ddf_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h4d6445f_19.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.5-h184cd82_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-hf1fc857_11.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.2-hcae1b89_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h76bae87_9.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.13.0-haf5610f_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.8.0-h148e6f0_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.12.0-hf03c1c4_0.conda @@ -885,20 +899,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-h85f69ea_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h53d5487_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.0-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.7.4-h56e8100_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.1-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cairo-1.18.0-h32b962e_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.0-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-hc2ea260_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda @@ -908,32 +922,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.2.1-py312h0d7def4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.3-py312he70551f_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/dart-sass-1.58.3-h57928b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.11-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/deno-1.41.0-h1f5608b_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/deno-dom-0.1.35-h8b8d39b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/double-conversion-3.3.0-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.23.1-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.2-h63175ca_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.9.6-py312h6629543_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.3-he0c23c2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.0-py312hd215820_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/fmt-11.0.2-h7f575de_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 @@ -943,12 +957,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.2-hbde0cde_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-hdaf720e_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/freexl-2.0.0-h8276f4a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda @@ -957,7 +971,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.12.2-h5a68840_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h232476a_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-9.0.0-h2bedf89_1.conda @@ -965,25 +979,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/hdf5-1.14.3-nompi_h2b43c12_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/icu-75.1-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.2.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.2.0-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2024.2.1-h57928b3_1083.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh7428d3b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh7428d3b_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda @@ -993,47 +1007,47 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.3-h6c43f9b_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.5-py312h0d7def4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.3-h6c43f9b_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.7-py312hd5eb7cc_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lcms2-2.16-h67d730c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240116.2-cxx17_he0c23c2_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.4-haf234dc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h6e8cf4f_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h29daf90_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-23_win64_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-hcfcfb64_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-23_win64_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.9.1-h18fefc2_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.0-h1ee3ff0_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.21-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.2-h63175ca_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.3-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h6b59ad6_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hefbb53f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h2fd8da2_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.80.3-h7025463_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.28.0-h5e7cea3_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.28.0-he5eb982_0.conda @@ -1042,31 +1056,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-hcfcfb64_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-3.0.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1020.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-23_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h92078aa_114.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.43-h19919ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-4.25.3-h503648d_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.09.01-hf8d8778_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-h6c42fcb_16.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.18-h8d14728_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-hab0cb6d_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.0-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.1-h2466b09_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libssh2-1.11.0-h7dfc565_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-ha2b3283_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-hbe90ef8_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.6.0-hb151862_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libutf8proc-2.8.0-h82a8f57_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.4.0-hcfcfb64_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-hcd874cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-h013a479_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libxml2-2.12.7-h0f24e4e_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libxslt-1.1.39-h3df6e99_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.10.1-h1d365fa_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h594ca44_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h0608a1d_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.4-hcfcfb64_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2 @@ -1076,7 +1090,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312he70551f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -1085,11 +1099,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/minizip-4.0.6-hb638d1e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.1.0-h66d3029_694.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.0.8-py312hd5eb7cc_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.1-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda @@ -1099,41 +1113,41 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py312hcccf92d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.1-py312h49bc9c5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.2-py312h49bc9c5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.2-h3d672ee_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.1-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.2-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h784c2ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.2-py312h72972c8_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240807-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.3-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2.1-h57928b3_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pcre2-10.44-h3d7b363_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.43.4-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h686f694_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h9415970_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.4.1-hd9569ee_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/pthreads-win32-2.9.1-hfa6e2cd_3.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda @@ -1141,15 +1155,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-core-17.0.0-py312h6a9c419_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.20.1-py312h2615798_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.3-py312h2615798_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_8.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_3.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda @@ -1158,16 +1172,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-306-py312h53d5487_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h53d5487_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.1.0-py312hd7027bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/qhull-2020.2-hc790b64_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.55-h57928b3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.10-py312he4a2ebf_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.57-h57928b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.11-py312he4a2ebf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.09.01-hd3b24a8_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda @@ -1175,16 +1189,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.7.1-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.1-py312h7a6832a_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.1-py312h816cc57_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.0-py312h1f4e10d_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.4-py312h881003e_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.2-py312h816cc57_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-72.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.5-py312h3a88d77_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h3a88d77_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/snappy-1.2.1-h23299a8_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda @@ -1193,42 +1207,42 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/spdlog-1.14.1-h9f2357e_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sphobjinv-2.3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.0-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.12.0-hc790b64_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-hc790b64_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.25.0-h3c7d8a4_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.0-h98a567f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240712-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/typst-0.11.0-h975169c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312h0d7def4_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312hd5eb7cc_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/uriparser-0.9.8-h5a68840_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_20.conda 
- - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-4.0.1-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_21.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_21.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_21.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.2-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda @@ -1236,20 +1250,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312he70551f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-hcd874cb_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.6-h0ea2cb4_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl - pypi: src/bokeh_helpers @@ -1271,25 +1285,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h98912ed_4.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.25-h15d0e8c_6.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.3-h8dac057_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.27-h4bc722e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.18-h038f3f9_10.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.2-h570d160_21.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.7-ha1f794c_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h0040ed1_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-hc14a930_17.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.4-h558cea2_8.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h038f3f9_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h038f3f9_10.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.27.5-hd0b8a3b_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h7dc8893_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.29-h03582ad_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.8-h5e77a74_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-hc2627b9_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h01636a3_19.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-h191b246_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h29c84ef_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda @@ -1300,20 +1314,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h30efb56_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.0-ha66036c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.0-py312h1671c18_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda @@ -1323,33 +1337,33 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.1-py312h8572e83_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h41a817b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py312h98912ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dart-sass-1.58.3-ha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.11-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312hca68cad_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312h2ec8cdc_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-1.41.0-hfc7925d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-dom-0.1.35-hd9586b0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.23.1-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.2-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.6-py312h96884de_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.0-py312h5aa26c2_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 @@ -1359,12 +1373,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h41a817b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h7eda2e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda @@ -1376,7 +1390,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda @@ -1384,25 +1398,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.2.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.2.0-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda @@ -1412,12 +1426,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hee9dde6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hf8d3e68_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py312h8572e83_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda @@ -1425,52 +1439,58 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8756180_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-he02047a_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-he02047a_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hc9a23c6_8_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-23_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-23_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.0-hbbe4b11_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.122-h4ab18f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hba09cee_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-hdd6600c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-h5f34788_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-ha39a594_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-ha2ed5f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h2ebfdf0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h2b45729_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-h94e7027_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h0fa2cb4_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-he047751_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-he047751_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h9d8aadb_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h062f1c4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hbd1db40_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h151b34b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h151b34b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda 
+ - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hbbc8833_1020.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-23_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm18-18.1.8-h8b73ec9_2.conda @@ -1478,38 +1498,39 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-haa1307c_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h482b261_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-hc0a3c3a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-hb90f79a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h46a8edc_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h9c5d478_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312h03f37cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h98912ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312h854627b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -1517,60 +1538,60 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.8-py312h2492b07_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.1-py312h41a817b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.3.0-h70512c7_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.3.0-ha479ceb_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h70512c7_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-ha479ceb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.103-h593d115_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.1-py312h1103770_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.2-py312h58c1407_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h98912ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda - 
conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.2-py312h1d6d2e6_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240807-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.3-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2.1-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h287a98d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h56024de_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - 
conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-hb0d391f_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-ha8faf9a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h9a8786e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda @@ -1578,15 +1599,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.20.1-py312hf008fa9_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.3-py312h12e396e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h8ad7a51_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h01329cd_8.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312hb5137db_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h5aa26c2_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312h91f0f75_3.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda @@ -1595,14 +1616,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h41a817b_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.1.0-py312h7ab5c7e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.55-ha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.10-py312hff7f44f_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.57-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda @@ -1610,17 +1631,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.7.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312hf008fa9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.1-py312hbe4c86d_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.0-h3400bea_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.1-py312h775a589_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.0-py312h499d17b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.4-py312hd18ad41_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.2-h7b32b05_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-72.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.5-py312h8413631_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h41a817b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda @@ -1629,54 +1650,56 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sphobjinv-2.3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.0-h6d4b2fc_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.1-h9eae976_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.25.0-h213c483_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h9a8786e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240712-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/typst-0.11.0-he8a937b_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024a-h3f72095_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h8572e83_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-4.0.1-py312h7900ff3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.0-h5291e77_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.2-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h98912ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h66e93f0_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.4-h4ab18f5_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h666cd97_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.42-h4ab18f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda @@ -1684,19 +1707,24 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-h4bc722e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-h4bc722e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-recordproto-1.14.2-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h75354e8_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312h3483029_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl - pypi: ../Ribasim/python/ribasim @@ -1711,25 +1739,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312h104f124_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312hb553811_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.25-h4880c77_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.3-hf37c103_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.27-hfdf4475_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.18-hf37c103_10.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.2-h324d61a_21.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.7-h3c4ec21_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hdc1c6f6_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-h93740dc_17.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.4-hd06a241_8.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-hf37c103_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-hf37c103_10.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.27.5-hfadd0b3_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-hae762b9_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.29-h2dfa2de_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hf6f7cdd_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.8-h2f86973_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-he4b61a0_19.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.5-h915d0f8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-h8128ea2_11.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.2-h27d4fa7_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-h7a58a96_9.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.13.0-hf8dbe3c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.8.0-h60298e3_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.12.0-h646f05d_0.conda @@ -1740,20 +1768,20 @@ 
environments: - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/blosc-1.21.6-h7d75f6d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312heafc425_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312h5861a67_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.0-h51dda26_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cairo-1.18.0-h37bd5c4_3.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.0-py312h9620c06_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.1-py312hf857d28_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-ha105788_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda @@ -1763,31 +1791,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.2.1-py312h9230928_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-0.12.3-py312h41838bb_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/dart-sass-1.58.3-h694c41f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.11-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h28f332c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h5861a67_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-1.41.0-h86af993_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-dom-0.1.35-h08cba0f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.23.1-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.2-h73e2aa4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.9.6-py312hfc836c0_4.conda 
+ - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.3-hac325c4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.0-py312hcd3578f_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/fmt-11.0.2-h3c5361c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 @@ -1797,12 +1825,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/fontconfig-2.14.2-h5bb23bf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/freetype-2.12.1-h60636b9_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/freexl-2.0.0-h3ec172f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h29648be_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda @@ -1813,32 +1841,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hb1e8313_1004.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/giflib-5.2.2-h10d778d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/glog-0.7.1-h2790a97_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf4-4.2.15-h8138101_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf5-1.14.3-nompi_h687a608_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/icu-75.1-h120a0e1_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.2.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.2.0-hd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh57ce528_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.17-h6253ea5_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda @@ -1848,49 +1876,49 @@ 
environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/kealib-1.5.3-hb2b617a_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/kiwisolver-1.4.5-py312h49ebfd2_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/kealib-1.5.3-he475af8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/kiwisolver-1.4.7-py312hc5c4d5f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/krb5-1.21.3-h37d8d59_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lcms2-2.16-ha2f27b4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lerc-4.0.0-hb486fe8_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240116.2-cxx17_hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libaec-1.1.3-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.4-h20e244c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h2952479_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_8_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-ha60c65e_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libblas-3.9.0-22_osx64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h0dc2134_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h00291cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcrc32c-1.1.2-he49afe7_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.9.1-hfcf2730_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-heced48a_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.0-h58e7537_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.21-hfdf4475_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libevent-2.1.12-ha90c15b_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.3-hac325c4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h4b9bb65_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h5d197d2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h385febf_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-h86719f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-h513f0eb_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hc5f35ca_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-h3b8d0bf_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-h3127c03_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-ha7d2355_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h1b48671_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h1b48671_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-ha63beff_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-h597966e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h26ecb72_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7ffd8cf_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7ffd8cf_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-13_2_0_h97931a8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-13.2.0-h2873a65_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.80.3-h736d271_2.conda @@ -1900,39 +1928,39 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.17-hd75f5a5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.22.5-hdfe23c8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libjpeg-turbo-3.0.0-h0dc2134_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libkml-1.3.0-hfcbc525_1020.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libkml-1.3.0-h9ee1731_1021.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/liblapack-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libllvm14-14.0.6-hc8e404f_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libnetcdf-4.9.2-nompi_h7334405_114.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libopenblas-0.3.27-openmp_h8869122_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.43-h92b6c6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h4501773_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h75a757a_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-4.25.3-h4e4d658_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.09.01-h81f5012_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-he2ba7a0_16.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.18-hbcb3906_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.20-hfdf4475_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hdc25a2c_9.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.0-h1b8f9f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.1-h4b8f8c9_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.0-hd019ec5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h87f9345_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h75589b3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.6.0-h603087a_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libutf8proc-2.8.0-hb7f2c08_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libwebp-base-1.4.0-h10d778d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h0dc2134_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h00291cd_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libxml2-2.12.7-heaf3512_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.10.1-hc158999_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-18.1.8-h15ab845_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hdeb90da_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hcc8fd36_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h904eaf1_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h83408cd_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-c-1.9.4-hf0c8a7f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lzo-2.10-h10d778d_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312h41838bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h0d5aeb7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -1940,58 +1968,58 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/minizip-4.0.7-h62b0c8d_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.0.8-py312hc3c9ca0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.1.0-py312hc5c4d5f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.2-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h5846eda_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.103-he7eb89d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.104-h3135457_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py312hc3b515d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.1-py312h8813227_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.2-py312he4d506f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openjpeg-2.5.2-h7310d3a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h8847cbe_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-h87427d6_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h732d5f6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.2-hd23fc13_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h22b2039_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.2-py312h1171441_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240807-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.3-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2.1-h694c41f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 
- conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pcre2-10.44-h7634a1b_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312hbd70edc_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312h683ea77_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.43.4-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h744cbf2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h65860a0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h9b73963_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h4b98a8f_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.4.1-hf92c781_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda @@ -1999,16 +2027,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-core-17.0.0-py312h63b501a_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.20.1-py312ha47ea1c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.3-py312h669792a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312he77c50b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312he77c50b_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312h43b3a95_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312ha320102_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312hab44e94_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312hab44e94_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312hcd3578f_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312haf32e09_9.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda @@ -2017,13 +2045,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hbd25219_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.1.0-py312h7a17523_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hb553811_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/qhull-2020.2-h3c5361c_5.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.55-h694c41f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.10-py312h1c98354_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.57-h694c41f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.11-py312h4104ae2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.09.01-hb168e87_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda @@ -2031,16 +2059,16 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.7.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312ha47ea1c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.1-py312h8b25c6c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.1-py312hc214ba5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.0-py312hb9702fa_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312h669792a_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.4-py312he6c0bb9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.2-py312h9d777eb_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-72.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.5-py312h594820c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h8047845_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/snappy-1.2.1-he1e6707_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda @@ -2049,57 +2077,57 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/spdlog-1.14.1-h325aa07_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sphobjinv-2.3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/sqlite-3.46.0-h28673e1_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/sqlite-3.46.1-he26b093_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.25.0-h6b8956e_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.0-h313d0e2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240712-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/typst-0.11.0-h11a7dfb_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024a-h10d778d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312h49ebfd2_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024b-h00291cd_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312hc5c4d5f_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/uriparser-0.9.8-h6aefe2f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-4.0.1-py312hbd25219_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.2-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312h41838bb_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-hfb503d4_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h0dc2134_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hde137ed_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hb33e954_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-h87427d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h331e495_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h7122b0e_1.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.6-h915ae27_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl - pypi: ../Ribasim/python/ribasim @@ -2113,25 +2141,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312he70551f_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.25-h7db803d_6.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.3-ha1e9ad3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.27-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.18-ha1e9ad3_10.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.2-hf2a634e_21.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.7-h6bd9195_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h8d4122e_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h827f298_17.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.4-h21392f2_8.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-ha1e9ad3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-ha1e9ad3_10.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.27.5-h1307057_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h21b9f41_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.29-hf1f9119_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hb6a8f00_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.8-heca9ddf_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h4d6445f_19.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.5-h184cd82_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-hf1fc857_11.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.2-hcae1b89_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h76bae87_9.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.13.0-haf5610f_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.8.0-h148e6f0_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.12.0-hf03c1c4_0.conda @@ -2141,20 +2169,20 @@ 
environments: - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-h85f69ea_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h53d5487_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.0-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.7.4-h56e8100_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.1-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cairo-1.18.0-h32b962e_3.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.0-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-hc2ea260_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda @@ -2164,32 +2192,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.2.1-py312h0d7def4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.3-py312he70551f_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/dart-sass-1.58.3-h57928b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.11-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/deno-1.41.0-h1f5608b_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/deno-dom-0.1.35-h8b8d39b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/double-conversion-3.3.0-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.23.1-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.2-h63175ca_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.9.6-py312h6629543_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.3-he0c23c2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.0-py312hd215820_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/fmt-11.0.2-h7f575de_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 @@ -2199,12 +2227,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.2-hbde0cde_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-hdaf720e_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/freexl-2.0.0-h8276f4a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_2.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda @@ -2213,7 +2241,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.12.2-h5a68840_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h232476a_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-9.0.0-h2bedf89_1.conda @@ -2221,25 +2249,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/hdf5-1.14.3-nompi_h2b43c12_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/icu-75.1-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.2.0-pyha770c72_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.2.0-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2024.2.1-h57928b3_1083.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh7428d3b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda @@ -2249,47 +2277,47 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.3-h6c43f9b_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.5-py312h0d7def4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.3-h6c43f9b_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.7-py312hd5eb7cc_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lcms2-2.16-h67d730c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240116.2-cxx17_he0c23c2_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.4-haf234dc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h6e8cf4f_8_cpu.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_8_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h29daf90_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-23_win64_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-hcfcfb64_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-23_win64_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.9.1-h18fefc2_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.0-h1ee3ff0_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.21-h2466b09_0.conda - 
conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.2-h63175ca_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.3-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h6b59ad6_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hefbb53f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h2fd8da2_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.80.3-h7025463_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.28.0-h5e7cea3_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.28.0-he5eb982_0.conda @@ -2298,31 +2326,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-hcfcfb64_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-3.0.0-hcfcfb64_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1020.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-23_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h92078aa_114.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_8_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_13_cpu.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.43-h19919ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-4.25.3-h503648d_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.09.01-hf8d8778_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-h6c42fcb_16.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.18-h8d14728_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-hab0cb6d_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.0-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libssh2-1.11.0-h7dfc565_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-ha2b3283_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-hbe90ef8_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.6.0-hb151862_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libutf8proc-2.8.0-h82a8f57_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.4.0-hcfcfb64_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-hcd874cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-h013a479_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libxml2-2.12.7-h0f24e4e_4.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libxslt-1.1.39-h3df6e99_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.10.1-h1d365fa_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h594ca44_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h0608a1d_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.4-hcfcfb64_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2 @@ -2332,7 +2360,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312he70551f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -2341,11 +2369,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/minizip-4.0.6-hb638d1e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - 
conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.1.0-h66d3029_694.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.0.8-py312hd5eb7cc_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.1-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda @@ -2355,41 +2383,41 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py312hcccf92d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.1-py312h49bc9c5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.2-py312h49bc9c5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.2-h3d672ee_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.1-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.2-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h784c2ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.2-py312h72972c8_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240807-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.3-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2.1-h57928b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pcre2-10.44-h3d7b363_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.43.4-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h686f694_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h9415970_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.4.1-hd9569ee_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/pthreads-win32-2.9.1-hfa6e2cd_3.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda @@ -2397,15 +2425,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-core-17.0.0-py312h6a9c419_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.20.1-py312h2615798_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.3-py312h2615798_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_8.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_3.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.2-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda @@ -2414,32 +2442,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-306-py312h53d5487_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h53d5487_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.1.0-py312hd7027bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/qhull-2020.2-hc790b64_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.55-h57928b3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.10-py312he4a2ebf_5.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.57-h57928b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.11-py312he4a2ebf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.09.01-hd3b24a8_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.7.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.1-py312h7a6832a_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.1-py312h816cc57_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.0-py312h1f4e10d_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.4-py312h881003e_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.2-py312h816cc57_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-72.1.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.5-py312h3a88d77_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h3a88d77_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/snappy-1.2.1-h23299a8_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda @@ -2448,42 +2476,42 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/spdlog-1.14.1-h9f2357e_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/sphobjinv-2.3.1.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.0-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.12.0-hc790b64_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-hc790b64_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.25.0-h3c7d8a4_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.0-h98a567f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240712-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/typst-0.11.0-h975169c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312h0d7def4_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312hd5eb7cc_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/uriparser-0.9.8-h5a68840_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_20.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-4.0.1-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_21.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_21.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_21.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.2-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda @@ -2491,20 +2519,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312he70551f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-hcd874cb_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.6-h0ea2cb4_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl - pypi: ../Ribasim/python/ribasim @@ -2676,64 +2704,66 @@ packages: - kind: conda name: argon2-cffi-bindings version: 21.2.0 - build: py312h104f124_4 - build_number: 4 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312h104f124_4.conda - sha256: aa321e91f0ff365b5261fa1dcffa2d32aa957561bdbb38988e52e28e25a762a8 - md5: dddfb6125aed1fb84eb13319007c08fd + build: py312h4389bb4_5 + build_number: 5 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda + sha256: 8764a8a9416d90264c7d36526de77240a454d0ee140841db545bdd5825ebd6f1 + md5: 53943e7ecba6b3e3744b292dc3fb4ae2 depends: - cffi >=1.0.1 - - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/argon2-cffi-bindings?source=hash-mapping - size: 32556 - timestamp: 1695387174872 + size: 34399 + timestamp: 1725357069475 - kind: conda name: argon2-cffi-bindings version: 21.2.0 - build: py312h98912ed_4 - build_number: 4 + build: py312h66e93f0_5 + build_number: 5 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h98912ed_4.conda - sha256: 
8ddb4a586bc128f1b9484f82c5cb0226340527fbfe093adf3b76b7e755e11477 - md5: 00536e0a1734dcde9815fe227f32fc5a + url: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda + sha256: 3cbc3b026f5c3f26de696ead10607db8d80cbb003d87669ac3b02e884f711978 + md5: 1505fc57c305c0a3174ea7aae0a0db25 depends: + - __glibc >=2.17,<3.0.a0 - cffi >=1.0.1 - - libgcc-ng >=12 - - python >=3.12.0rc3,<3.13.0a0 + - libgcc >=13 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - pkg:pypi/argon2-cffi-bindings?source=hash-mapping - size: 35142 - timestamp: 1695386704886 + size: 34847 + timestamp: 1725356749774 - kind: conda name: argon2-cffi-bindings version: 21.2.0 - build: py312he70551f_4 - build_number: 4 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312he70551f_4.conda - sha256: 4c3c428b994400ca753d9d0adbb11ce2d2a87f4dacd86c91d6cf985c5d89a3e1 - md5: 69b7a1d899d46b91f8eecab9abf9728c + build: py312hb553811_5 + build_number: 5 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312hb553811_5.conda + sha256: 37d61df3778b99e12d8adbaf7f1c5e8b07616ef3ada4436ad995f25c25ae6fda + md5: 033345df1d545bc40b52e03cb03db4e0 depends: + - __osx >=10.13 - cffi >=1.0.1 - - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/argon2-cffi-bindings?source=hash-mapping - size: 34750 - timestamp: 1695387347676 + size: 31898 + timestamp: 1725356938246 - kind: conda name: arrow version: 1.3.0 @@ -2820,65 +2850,65 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/attrs?source=compressed-mapping + - pkg:pypi/attrs?source=hash-mapping size: 56048 timestamp: 1722977241383 - kind: conda name: aws-c-auth - version: 0.7.25 - build: h15d0e8c_6 - build_number: 6 + version: 
0.7.29 + build: h03582ad_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.25-h15d0e8c_6.conda - sha256: 0680ca18238e17d319f87bb8390d116292592c6f5534c66404542665d6149fae - md5: e0d292ba383ac09598c664186c0144cd + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.29-h03582ad_1.conda + sha256: 97379dd69b78e5b07a4a776bccb5835aa71f170912385e71ddba5cc93d9085dc + md5: 6d23dd1c1742112d5fe9f529da7afea9 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - libgcc-ng >=12 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 107286 - timestamp: 1723725324766 + size: 107282 + timestamp: 1725868193209 - kind: conda name: aws-c-auth - version: 0.7.25 - build: h4880c77_6 - build_number: 6 + version: 0.7.29 + build: h2dfa2de_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.25-h4880c77_6.conda - sha256: 31085909daaea5bc3d8288577a4f63de57a4bd7ab6510d0946c03b70edc1745d - md5: 31c5fb2e092df17d88d315f1deca5af6 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.29-h2dfa2de_1.conda + sha256: a75f56a0d258a837f555c63a5d621e10497e6026c667b919a218038b9ad18647 + md5: e297a166392146d9e3fe3118550b9ff3 depends: - __osx >=10.13 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 94011 - timestamp: 1723725415937 + size: 94284 + timestamp: 1725868368256 - kind: conda name: aws-c-auth - 
version: 0.7.25 - build: h7db803d_6 - build_number: 6 + version: 0.7.29 + build: hf1f9119_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.25-h7db803d_6.conda - sha256: 9dcb467b8f6b33c44fb4644501bcfe8dd0df9cd066b27e725aae22688a3fda01 - md5: c17d2724a6f73adcaaf39d1271ca20b1 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.29-hf1f9119_1.conda + sha256: 617b3aa9cea4d1107a0809e0bc85ed60a7c6095a4992af9c08e97492cc65fa56 + md5: 8f3aa5632a78884b7f788e9d0fee03f3 depends: - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - ucrt >=10.0.20348.0 @@ -2887,38 +2917,37 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 102584 - timestamp: 1723725722832 + size: 102681 + timestamp: 1725868656049 - kind: conda name: aws-c-cal - version: 0.7.3 - build: h8dac057_2 - build_number: 2 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.3-h8dac057_2.conda - sha256: bfd4f73855e926e6c7c9db700a17ef3ddb0e848b85edd04d766d50a008835407 - md5: 577509458a061ddc9b089602ac6e1e98 + version: 0.7.4 + build: h8128ea2_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda + sha256: 6ffa143181fa40bbbe1b5dfad149b68e4c3fcb6e5d38a4f5a4490c8c3b4402df + md5: 195ef3e2d7dadb02a4b1f874a1e5e1e6 depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - libgcc-ng >=12 + - __osx >=10.13 + - aws-c-common >=0.9.28,<0.9.29.0a0 - openssl >=3.3.1,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 47317 - timestamp: 1723674520890 + size: 39204 + timestamp: 1725829973 - kind: conda name: aws-c-cal - version: 0.7.3 - build: ha1e9ad3_2 - build_number: 2 + version: 
0.7.4 + build: hf1fc857_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.3-ha1e9ad3_2.conda - sha256: 1bb4570afeb28f6b4693b4cb037d823308b40020b779f42d508d42dab0b44eaa - md5: c763ff9c4c712558ef72a460e4a2dbae + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda + sha256: f7ea9d52f759775dde2a39e1a2325e4659bfb2859f7a45798323c7cb00ed2770 + md5: 7c01760e07f867666662a4d91e998308 depends: - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 - openssl >=3.3.1,<4.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -2926,34 +2955,50 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 47054 - timestamp: 1723675012781 + size: 46848 + timestamp: 1725830274457 - kind: conda name: aws-c-cal - version: 0.7.3 - build: hf37c103_2 - build_number: 2 + version: 0.7.4 + build: hfd43aa1_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda + sha256: 8c8100499b7fced0c6a5eea156e85994d3bb0702b30eecedd949d555ca11f6a8 + md5: f301eb944d297fc879c441fffe461d8a + depends: + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - libgcc >=13 + - openssl >=3.3.1,<4.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 47532 + timestamp: 1725829965837 +- kind: conda + name: aws-c-common + version: 0.9.28 + build: h00291cd_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.3-hf37c103_2.conda - sha256: b008a16ac17edb0f5ecae88ee512ca6a1d5fdee3e4068eb73bd9656e7844ee3c - md5: 741ce62d05d8f62a74644c75f0179615 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda + sha256: 9af8c4514526829de390bc5f5c103487dff1cd025463ea90b7f8dbb8f1d0ff16 + md5: ffe8898e6d97ecb791df1350ce273508 depends: - __osx >=10.13 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - openssl >=3.3.1,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - 
size: 39391 - timestamp: 1723674565375 + size: 225877 + timestamp: 1725670122224 - kind: conda name: aws-c-common - version: 0.9.27 + version: 0.9.28 build: h2466b09_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.27-h2466b09_0.conda - sha256: 6d59ceca0f648ef0e6d6bce00dc3824a9340bf8fbffeafc80d4dbd230860579b - md5: 8355fbefc33c680fa1a96ba7d3365dfa + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda + sha256: 102e955695d4b996753773552820b18b6d0c1f8d77ac0412041341bece100815 + md5: 3ffb0664a913a557bf89ed1834d0c12c depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -2961,145 +3006,110 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 235039 - timestamp: 1723640170113 + size: 233724 + timestamp: 1725670503118 - kind: conda name: aws-c-common - version: 0.9.27 - build: h4bc722e_0 + version: 0.9.28 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.27-h4bc722e_0.conda - sha256: b1725a5ec43bcf606d6bdb248312aa51386b30339dd83a1f16edf620fe03d941 - md5: 817119e8a21a45d325f65d0d54710052 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda + sha256: febe894ae2f5bfc4d65c51bd058433e9061d994ff06b30d5eca18919639c5083 + md5: 1b53af320b24547ce0fb8196d2604542 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - license: Apache-2.0 - license_family: Apache - purls: [] - size: 236759 - timestamp: 1723639577027 -- kind: conda - name: aws-c-common - version: 0.9.27 - build: hfdf4475_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.27-hfdf4475_0.conda - sha256: 3420001537d36a20c81c0a832f95feec849bc50cec4429025498498de8c6be0a - md5: 3248125bfac52e553ebb6d010176cc1a - depends: - - __osx >=10.13 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 225349 - timestamp: 1723639748928 + size: 236451 + timestamp: 1725670076853 - kind: conda 
name: aws-c-compression - version: 0.2.18 - build: h038f3f9_10 - build_number: 10 + version: 0.2.19 + build: h756ea98_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.18-h038f3f9_10.conda - sha256: d8911cff9a3a61af5dc2b30e6d5a3b79f269f0c6af2854a5315bc248c7b5f8a2 - md5: 76b09778c1bd489de8691349fd4a73d0 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda + sha256: 0e7fd40a9f8aa235e78202af75a421a7f6ea589e30c5cbe1787ceaccf36a3ce9 + md5: 5e08c385a1b8a79b52012b74653bbb99 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - libgcc-ng >=12 - license: Apache-2.0 - license_family: Apache - purls: [] - size: 19017 - timestamp: 1723674561003 -- kind: conda - name: aws-c-compression - version: 0.2.18 - build: ha1e9ad3_10 - build_number: 10 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.18-ha1e9ad3_10.conda - sha256: 4d9a6c0d98d2f70158cd8c35fffeeb163b6d9ed51cdf71b17c10b369d6b04d03 - md5: 303ba17cb3c685e15ddc17e7f9774772 - depends: - - aws-c-common >=0.9.27,<0.9.28.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 22487 - timestamp: 1723675004991 + size: 19116 + timestamp: 1725829968483 - kind: conda name: aws-c-compression - version: 0.2.18 - build: hf37c103_10 - build_number: 10 + version: 0.2.19 + build: h8128ea2_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.18-hf37c103_10.conda - sha256: 03ea1d1d1c64f691610ed13e8435522af729819b1ae5698134a8a71103c2aafa - md5: 42578f83cdd0023f2fa09274cc219a44 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda + sha256: f60f8bec5eddd1974367aac03a646996374d8f290bb4463dfbf1e7620462e7be + md5: 43be0637437461d48ff524c04459ee46 
depends: - __osx >=10.13 - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 17949 - timestamp: 1723674703315 + size: 17936 + timestamp: 1725829971987 - kind: conda - name: aws-c-event-stream - version: 0.4.2 - build: h324d61a_21 - build_number: 21 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.2-h324d61a_21.conda - sha256: ab2d61092706426f9819230ef48a91d557f061694dc7e81b2c4b5c1a528bc9ff - md5: 13db8f3e82207ccf44502b4559da5ed0 + name: aws-c-compression + version: 0.2.19 + build: hf1fc857_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda + sha256: 0e5913b72e730644a9ea8b5ed8d8fbc32d288d202882a9ec089b64a18612dc31 + md5: 289e8943be0dce6b1abf60652bc1492e depends: - - __osx >=10.13 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 - - libcxx >=16 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 46641 - timestamp: 1723711400871 + size: 22447 + timestamp: 1725830398597 - kind: conda name: aws-c-event-stream - version: 0.4.2 - build: h570d160_21 - build_number: 21 + version: 0.4.3 + build: h235a6dd_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.2-h570d160_21.conda - sha256: 43fcaff53b6cbf554cae1949e421e15ee933570ddff4dd8d7522bc8e680cab37 - md5: f6f77c408f324ed20bba4b32cb04d875 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda + sha256: 987b3654e7cbb8ead0227c2442a02b6c379d21bb1509a834c423d492a4862706 + md5: c05358e3a231195f7f0b3f592078bb0c depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-io 
>=0.14.18,<0.14.19.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc >=13 + - libstdcxx >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 53514 - timestamp: 1723711287620 + size: 53989 + timestamp: 1725856758424 - kind: conda name: aws-c-event-stream - version: 0.4.2 - build: hf2a634e_21 - build_number: 21 + version: 0.4.3 + build: hb6a8f00_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.2-hf2a634e_21.conda - sha256: cf6b72397b6bc6f201ce662695a6b6a12f4ecdca0ae90269a930c1f7d15292aa - md5: afde3d8ecc9caf1bc3d2b6396577ed71 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hb6a8f00_1.conda + sha256: 774c763717e58f02333c7f716f126d8bc4d5c0b9659be7be392d7d5cd1aa1d42 + md5: 83e9cebf2e055e5495b9583601ca2d0c depends: - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 - ucrt >=10.0.20348.0 @@ -3108,203 +3118,223 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 54798 - timestamp: 1723711554331 + size: 54527 + timestamp: 1725857386993 - kind: conda - name: aws-c-http - version: 0.8.7 - build: h3c4ec21_4 - build_number: 4 + name: aws-c-event-stream + version: 0.4.3 + build: hf6f7cdd_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.7-h3c4ec21_4.conda - sha256: 0ef0e979b190f4a17ec272a8d679d1ec9f52a3887ac3cff60d7cde421e40fa3d - md5: 47710a3e44f5131db646d38aab891d81 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hf6f7cdd_1.conda + sha256: 3a86d81ece111acc080cab42df6afc5c272c4ee7495d8cda22c90fc54bb0f27e + md5: 6f1d1e8b410d31a11db29d802f21cb64 depends: - __osx >=10.13 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-compression >=0.2.18,<0.2.19.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-io 
>=0.14.18,<0.14.19.0a0 + - aws-checksums >=0.1.18,<0.1.19.0a0 + - libcxx >=17 license: Apache-2.0 license_family: Apache purls: [] - size: 164440 - timestamp: 1723711297899 + size: 46628 + timestamp: 1725856844781 - kind: conda name: aws-c-http - version: 0.8.7 - build: h6bd9195_4 - build_number: 4 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.7-h6bd9195_4.conda - sha256: bf1af1c437c67429e83fefed87d63843d5935296b6ab75d9e744584049606353 - md5: 7b80ad72c9a0f208eda3363e391b0106 + version: 0.8.8 + build: h2f86973_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.8-h2f86973_2.conda + sha256: ed4350ada258ea8127a1d6af681e109956c3258aeb3e7e81f9e3d03881e91c5e + md5: a4fa477bc4b23b11f5a8f6b0e3a9ca97 depends: - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-compression >=0.2.18,<0.2.19.0a0 + - __osx >=10.13 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-compression >=0.2.19,<0.2.20.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 182267 - timestamp: 1723711542833 + size: 164110 + timestamp: 1725856460863 - kind: conda name: aws-c-http - version: 0.8.7 - build: ha1f794c_4 - build_number: 4 + version: 0.8.8 + build: h5e77a74_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.7-ha1f794c_4.conda - sha256: 89f49970f60bd75a668273db036079615b3f8f4de4f67bf96e9e89d7977a1c73 - md5: b506fe315f908ea9b94036a1e5de5e6e + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.8-h5e77a74_2.conda + sha256: cef335beb17cd299024fae300653ae491c866f7c93287bdf44a9e9b4762b1a54 + md5: b75afaaf2a4ea0e1137ecb35262b8ed4 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-compression 
>=0.2.18,<0.2.19.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-compression >=0.2.19,<0.2.20.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - libgcc-ng >=12 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 197186 - timestamp: 1723711186801 + size: 197416 + timestamp: 1725856481663 - kind: conda - name: aws-c-io - version: 0.14.18 - build: h0040ed1_5 - build_number: 5 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h0040ed1_5.conda - sha256: 834f5bdf6ade6df5f437f5c81f8d5e28c7a4d591e13d5e81cfadc2682dd773d4 - md5: 2f6316f09b3152fecc2d34ab69508e6a + name: aws-c-http + version: 0.8.8 + build: heca9ddf_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.8-heca9ddf_2.conda + sha256: 9f151008584d7eb58b1184ac83015a5f8bc8e82cc4fa1e69d660e6260f79f4bc + md5: fcfd389b611656e45860e8e91ac70088 depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - libgcc-ng >=12 - - s2n >=1.5.0,<1.5.1.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-compression >=0.2.19,<0.2.20.0a0 + - aws-c-io >=0.14.18,<0.14.19.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 158738 - timestamp: 1723698313285 + size: 182376 + timestamp: 1725857088696 - kind: conda name: aws-c-io version: 0.14.18 - build: h8d4122e_5 - build_number: 5 + build: h3831a8d_9 + build_number: 9 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h8d4122e_5.conda - sha256: fd09a8b70cfa40507ceac9c8fa949da7c709c83ee0283290f2f11a01bcdf17ba - md5: 8cb89d13787c23f66bcc1f29351baa2a + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_9.conda + sha256: a861e2521e7c70322af6be848f7f9e9b984cfc7b4b6ec436340c08043d652ab2 + md5: 2b8d0c758a7cb3608b87f5aecdb152c7 
depends: - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 160344 - timestamp: 1723698751903 + size: 160801 + timestamp: 1725843427442 - kind: conda name: aws-c-io version: 0.14.18 - build: hdc1c6f6_5 - build_number: 5 + build: hc2627b9_9 + build_number: 9 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-hc2627b9_9.conda + sha256: c39d321fb1b0388334f9a3fff1b867de624f455f3f01b7dba10b23bc040e8280 + md5: b1ba84c5cb2e6fe5f5cd1101097a4592 + depends: + - __glibc >=2.17,<3.0.a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - libgcc >=13 + - s2n >=1.5.2,<1.5.3.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 158670 + timestamp: 1725843016336 +- kind: conda + name: aws-c-io + version: 0.14.18 + build: hf9a0f1c_9 + build_number: 9 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hdc1c6f6_5.conda - sha256: 9f6b59a9672f49f0dbda71348aac28927256c42c36889d02693c50365ffb0047 - md5: 5f557bf492424b940648da9d8c2277d9 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_9.conda + sha256: a089493c67ec9e000061920f5a2ef233f59911d474bc77dcec0f4fb9738750ab + md5: c67eee7b35a3fa7a186d65a604a4a01f depends: - __osx >=10.13 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 138990 - timestamp: 1723698324671 + size: 138424 + timestamp: 1725843066014 - kind: conda name: aws-c-mqtt version: 0.10.4 - build: h827f298_17 - build_number: 17 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h827f298_17.conda - sha256: 
9804a9027ff17955953ef934c48e7344db43f8b0d90a7c45fa5afeecb4f8ae9e - md5: e623bc494578a42b34d439472e20a9b2 + build: h01636a3_19 + build_number: 19 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h01636a3_19.conda + sha256: f188f9127e12b2f90d68c5887f9742838528d8ea64c11e25c90e135cc1465326 + md5: 8ec16206ccaaf74ee5830ffeba436ebc depends: - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 157632 - timestamp: 1723726738411 + size: 163865 + timestamp: 1725892070997 - kind: conda name: aws-c-mqtt version: 0.10.4 - build: h93740dc_17 - build_number: 17 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-h93740dc_17.conda - sha256: da059d67450af5aa87583f9ec231ed18ce481666f410e4964ffbde3ca0681a7f - md5: 3311cf7d8a088c450af9ee4b22972edd + build: h4d6445f_19 + build_number: 19 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h4d6445f_19.conda + sha256: 0dc65ecddda8d26390b2d1cb5db074739c74d47c94f5e0a3927f8431bd0912b5 + md5: edf26447a744762aa7ac8fe678e046ca depends: - - __osx >=10.13 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 139161 - timestamp: 1723726448430 + size: 157732 + timestamp: 1725892612990 - kind: conda name: aws-c-mqtt version: 0.10.4 - build: hc14a930_17 - build_number: 17 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-hc14a930_17.conda - 
sha256: bc176d82875700e0ee51ddaef188fd6ca2a44b2efc69e0cb434460319216049b - md5: f0e3f95a9f545d5975e8573f80cdb5fa + build: he4b61a0_19 + build_number: 19 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-he4b61a0_19.conda + sha256: 5d38c7493b28100b954ae1f7420e0876ad0209b99a84600de6d691a220f03e6e + md5: 3cacaf9254c92818cd32de10b3a7bafe depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - __osx >=10.13 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - libgcc-ng >=12 license: Apache-2.0 license_family: Apache purls: [] - size: 163877 - timestamp: 1723726327641 + size: 138915 + timestamp: 1725892131190 - kind: conda name: aws-c-s3 - version: 0.6.4 - build: h21392f2_8 - build_number: 8 + version: 0.6.5 + build: h184cd82_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.4-h21392f2_8.conda - sha256: 8137069560d3778e790618a2817eb0c5da4e1a97cd5ff91c778e365b3147b737 - md5: 15db0136544c378e00fd7158bf019a21 - depends: - - aws-c-auth >=0.7.25,<0.7.26.0a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.5-h184cd82_2.conda + sha256: 21ffdc5473041b92a5e581a775988cb59d5b1cbda707b63dc6fc28cefd3b8f25 + md5: df345266c40ab1a2ac3b79be8aa421a2 + depends: + - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 - ucrt >=10.0.20348.0 @@ -3313,314 +3343,314 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 107820 - timestamp: 1723739640662 + size: 108320 + timestamp: 1725882801691 - kind: conda name: aws-c-s3 - version: 0.6.4 - build: h558cea2_8 - build_number: 8 + version: 0.6.5 + build: 
h191b246_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.4-h558cea2_8.conda - sha256: 8206b63d89e1cf08a0e4bc4852cb15080bc9754df48acbc5e59fbe2ec50b3da8 - md5: af03e7b03e929396fb80ffac1a676c89 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-h191b246_2.conda + sha256: f43e6a308ae388e4a3968690ae8789e5cfb4d51c96d36a00c832a9067685b1d3 + md5: f8f40355dac7a75313d9c10de91330e7 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-auth >=0.7.25,<0.7.26.0a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 - - libgcc-ng >=12 - - openssl >=3.3.1,<4.0a0 + - libgcc >=13 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 111536 - timestamp: 1723739247330 + size: 112780 + timestamp: 1725882305631 - kind: conda name: aws-c-s3 - version: 0.6.4 - build: hd06a241_8 - build_number: 8 + version: 0.6.5 + build: h915d0f8_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.4-hd06a241_8.conda - sha256: 06764f43c21ecad9be0a546d4a8b798e353ede7058674c70ab4d259458b0e42e - md5: 192a67ed8897b6cfb844d83da17f3f36 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.5-h915d0f8_2.conda + sha256: b12778ac3bfa5574420472faee2944952c07067f1dc8cca832013edea1982b48 + md5: eb182c006b6eb87d523d51295c2e8050 depends: - __osx >=10.13 - - aws-c-auth >=0.7.25,<0.7.26.0a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 
license: Apache-2.0 license_family: Apache purls: [] - size: 96550 - timestamp: 1723739435090 + size: 97417 + timestamp: 1725882369510 - kind: conda name: aws-c-sdkutils version: 0.1.19 - build: h038f3f9_2 - build_number: 2 + build: h756ea98_3 + build_number: 3 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h038f3f9_2.conda - sha256: 5612c9cad56662db50a1bcc2d8dca1fe273f7abad6f670fef328e4044beabc75 - md5: 6861cab6cddb5d713cb3db95c838d30f + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda + sha256: 4e6f79f3fee5ebb4fb12b6258d91315ed0f7a2ac16c75611cffdbaa0d54badb2 + md5: bfe6623096906d2502c78ccdbfc3bc7a depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - libgcc-ng >=12 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 55878 - timestamp: 1723691348466 + size: 55799 + timestamp: 1725836731034 - kind: conda name: aws-c-sdkutils version: 0.1.19 - build: ha1e9ad3_2 - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-ha1e9ad3_2.conda - sha256: c87e00f73686c3b43f0a2d6ff480f28d2a8f07811bad5e406bbe0ca8240bbba4 - md5: da087023762ab6dc62fd1d374b6cc30f + build: h8128ea2_3 + build_number: 3 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda + sha256: 50912641279d00a6ce12b1d72e74ea5d30078e91a0557a48a9e9fe285c2f6b2c + md5: 8d93b3603363214303737f74b6efb5da depends: - - aws-c-common >=0.9.27,<0.9.28.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __osx >=10.13 + - aws-c-common >=0.9.28,<0.9.29.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 54992 - timestamp: 1723691803596 + size: 50686 + timestamp: 1725836776385 - kind: conda name: aws-c-sdkutils version: 0.1.19 - build: hf37c103_2 - build_number: 2 - subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-hf37c103_2.conda - sha256: 7c1d055c1f67e4572de18e9daec81b74f59a6f77c2213746dab3cf12b5be253f - md5: a8f45839733a97c206cd5df6945c4a27 + build: hf1fc857_3 + build_number: 3 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda + sha256: 5e42bba0f1ffd1a1cc5b80f5abae03c7118809f4545c688e56c2bb5a0ee3740e + md5: b00e5b1b3985d9dfadde29e8b00f85e4 depends: - - __osx >=10.13 - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 50490 - timestamp: 1723691467686 + size: 55242 + timestamp: 1725837225397 - kind: conda name: aws-checksums version: 0.1.18 - build: h038f3f9_10 - build_number: 10 + build: h756ea98_11 + build_number: 11 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h038f3f9_10.conda - sha256: a94547ff766fb420c368bb8d4fd1c8d99b13088d176c43ad7bb7458ef47e45bc - md5: 4bf9c8fcf2bb6793c55e5c5758b9b011 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda + sha256: c343bc670bdb52248fc039cbd1cba20fe1d18af81960ab43153d9b55dfb08bc1 + md5: eadcc12bedac44f13223a2909c0e5bcc depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - libgcc-ng >=12 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 49839 - timestamp: 1723691467978 + size: 49962 + timestamp: 1725836852149 - kind: conda name: aws-checksums version: 0.1.18 - build: ha1e9ad3_10 - build_number: 10 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-ha1e9ad3_10.conda - sha256: d992544ad6ab4fc55bae80bdff9ab6c9d71b021c91297e835e3999b9cab4645c - md5: 93c84eec0955253bf07b5fbff95c6200 + build: h8128ea2_11 + build_number: 11 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-h8128ea2_11.conda + sha256: 37965af8d420d114a5d603d149b7e4ce353b119dffe90ec67c53895cb0e5c402 + md5: 45959482adbad4397bfedcdf262bbb32 depends: - - aws-c-common >=0.9.27,<0.9.28.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __osx >=10.13 + - aws-c-common >=0.9.28,<0.9.29.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 52177 - timestamp: 1723691948336 + size: 48884 + timestamp: 1725836961245 - kind: conda name: aws-checksums version: 0.1.18 - build: hf37c103_10 - build_number: 10 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-hf37c103_10.conda - sha256: e42c8e70a71e9bd7a228328a5b51efae0c15bd8ef7ed26fae238461a8335f699 - md5: 86fb971912f9222b14b0b3e695c52461 + build: hf1fc857_11 + build_number: 11 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-hf1fc857_11.conda + sha256: e2ef958fe20a98031d39489875e58eece6d5572257f46f5ffdacfed5e41f803e + md5: 5dbcecf902763167d36d9b1da70d4b5d depends: - - __osx >=10.13 - - aws-c-common >=0.9.27,<0.9.28.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 48683 - timestamp: 1723691504517 + size: 52209 + timestamp: 1725837389845 - kind: conda name: aws-crt-cpp - version: 0.27.5 - build: h1307057_7 - build_number: 7 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.27.5-h1307057_7.conda - sha256: 8812a9e7e789521626df6a488d94524d13d18559d7a96069802057765e85bd50 - md5: 4b49f5b85db0ad1b87f8a1ab1f7de4a0 - depends: - - aws-c-auth >=0.7.25,<0.7.26.0a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-event-stream >=0.4.2,<0.4.3.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + version: 0.28.2 + build: h27d4fa7_4 + build_number: 4 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.2-h27d4fa7_4.conda + sha256: ccdf92124ea1b0909164226b22932ad39ac80838d537ec960ed26f50f0680c7e + md5: 760a535c189a995ee99474027a87d1bb + depends: + - __osx >=10.13 + - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-event-stream >=0.4.3,<0.4.4.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-mqtt >=0.10.4,<0.10.5.0a0 - - aws-c-s3 >=0.6.4,<0.6.5.0a0 + - aws-c-s3 >=0.6.5,<0.6.6.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libcxx >=17 license: Apache-2.0 license_family: Apache purls: [] - size: 254234 - timestamp: 1723755473861 + size: 294389 + timestamp: 1725905017625 - kind: conda name: aws-crt-cpp - version: 0.27.5 - build: hd0b8a3b_7 - build_number: 7 + version: 0.28.2 + build: h29c84ef_4 + build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.27.5-hd0b8a3b_7.conda - sha256: c2d149777484817c09ec01f4cc4b35c5c2c678b347436c5b44b237bb07926e58 - md5: 059dc1576393ab4b807e74f90e5db6d9 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h29c84ef_4.conda + sha256: 1404b6fd34e6e0e6587b771d4d63800123e0712792982bc2bbb0d78eeca26a94 + md5: 81674a3f6a59966a9ffaaaf063c8c331 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-auth >=0.7.25,<0.7.26.0a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-event-stream >=0.4.2,<0.4.3.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-event-stream >=0.4.3,<0.4.4.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-mqtt >=0.10.4,<0.10.5.0a0 - - aws-c-s3 >=0.6.4,<0.6.5.0a0 + - aws-c-s3 >=0.6.5,<0.6.6.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc 
>=13 + - libstdcxx >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 345538 - timestamp: 1723755126477 + size: 349192 + timestamp: 1725904799209 - kind: conda name: aws-crt-cpp - version: 0.27.5 - build: hfadd0b3_7 - build_number: 7 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.27.5-hfadd0b3_7.conda - sha256: 0c1940f584eb1997829b532b9653a165b30efcc8aa624320d710329275ff91d2 - md5: 2b9c297b37768ef5164f57044a10693b - depends: - - __osx >=10.13 - - aws-c-auth >=0.7.25,<0.7.26.0a0 - - aws-c-cal >=0.7.3,<0.7.4.0a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-event-stream >=0.4.2,<0.4.3.0a0 - - aws-c-http >=0.8.7,<0.8.8.0a0 + version: 0.28.2 + build: hcae1b89_4 + build_number: 4 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.2-hcae1b89_4.conda + sha256: 0e15511fb4fc3afa1ad5b08f75a508ea1a5ba85f68e0a7e621666104cda60673 + md5: 83ab71884fd2e42b68d0fae48fbcc2b0 + depends: + - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-event-stream >=0.4.3,<0.4.4.0a0 + - aws-c-http >=0.8.8,<0.8.9.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-mqtt >=0.10.4,<0.10.5.0a0 - - aws-c-s3 >=0.6.4,<0.6.5.0a0 + - aws-c-s3 >=0.6.5,<0.6.6.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - libcxx >=16 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 290536 - timestamp: 1723755230863 + size: 254847 + timestamp: 1725905205654 - kind: conda name: aws-sdk-cpp version: 1.11.379 - build: h21b9f41_3 - build_number: 3 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h21b9f41_3.conda - sha256: c53cceee2087da0fff642d66b008cac914552a9c0026cf9fc23efea86a6efd50 - md5: 13821fb121b16aa0e0cc7f18b8d9f5a9 + build: h5a9005d_9 + build_number: 9 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda + sha256: cc2227d97f5e7aed68aeb274a2bec0236af5c20519bde200c8ea7cba114ec978 + md5: 5dc18b385893b7991a3bbeb135ad7c3e depends: - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-event-stream >=0.4.2,<0.4.3.0a0 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 + - libcurl >=8.9.1,<9.0a0 + - libgcc >=13 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 2756951 - timestamp: 1723787192284 + size: 2934257 + timestamp: 1725944617781 - kind: conda name: aws-sdk-cpp version: 1.11.379 - build: h7dc8893_3 - build_number: 3 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h7dc8893_3.conda - sha256: 96f9e805f00ee56c8e5c9cbb04bb8360e15ea5c22e88d36a8377cefefb936894 - md5: c077ea74db96ebfd3366a2bae0701448 + build: h76bae87_9 + build_number: 9 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h76bae87_9.conda + sha256: ae01c01fd59d803dd95c97efa54221b93511c43cd1758b525d5b81bbbb5b8a82 + md5: 41de0c545f05649f3ce3a039b40a09b3 depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-event-stream >=0.4.2,<0.4.3.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 - - libcurl >=8.9.1,<9.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 2906830 - timestamp: 
1723786045735 + size: 2765550 + timestamp: 1725945456988 - kind: conda name: aws-sdk-cpp version: 1.11.379 - build: hae762b9_3 - build_number: 3 + build: h7a58a96_9 + build_number: 9 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-hae762b9_3.conda - sha256: 2079d4eba178919607cc817a620c68b7693e18fb4e5ac3bd0da0aeb1ec034fbf - md5: 66fa6729fc9979a5c92b112555b10be0 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-h7a58a96_9.conda + sha256: 65ceb0bfddbeaf3f242ad737e1ed4dba77ba1ebc4ce74a02d7fc276aa2df544d + md5: 9d700e1fee39399bf96abf6e66cdd92d depends: - __osx >=10.13 - - aws-c-common >=0.9.27,<0.9.28.0a0 - - aws-c-event-stream >=0.4.2,<0.4.3.0a0 + - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-checksums >=0.1.18,<0.1.19.0a0 - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - libcurl >=8.9.1,<9.0a0 - - libcxx >=16 + - libcxx >=17 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 2734389 - timestamp: 1723786790239 + size: 2754435 + timestamp: 1725944826345 - kind: conda name: azure-core-cpp version: 1.13.0 @@ -3964,7 +3994,6 @@ packages: license_family: Apache purls: - pkg:pypi/bleach?source=hash-mapping - - pkg:pypi/html5lib?source=hash-mapping size: 131220 timestamp: 1696630354218 - kind: conda @@ -4030,13 +4059,13 @@ packages: timestamp: 1719266029046 - kind: conda name: bokeh - version: 3.5.1 + version: 3.5.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.1-pyhd8ed1ab_0.conda - sha256: 3f6558cecdcd2c7865cb43a5b67b66e2387c2f6531eb45b236f33a3c496f4c2f - md5: d1e7e496405a75fd48ea94f2560c6843 + url: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda + sha256: 8af284264eb1cb9c08586ac8c212dcafc929ef1de3db9d0d7f8ca75190a30f4b + md5: 38d785787ec83d0431b3855328395113 
depends: - contourpy >=1.2 - jinja2 >=2.9 @@ -4051,9 +4080,9 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/bokeh?source=compressed-mapping - size: 4774914 - timestamp: 1721988747522 + - pkg:pypi/bokeh?source=hash-mapping + size: 4798991 + timestamp: 1724417639170 - kind: pypi name: bokeh-helpers version: 0.1.0 @@ -4085,182 +4114,188 @@ packages: - kind: conda name: brotli version: 1.1.0 - build: h0dc2134_1 - build_number: 1 + build: h00291cd_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h0dc2134_1.conda - sha256: 4bf66d450be5d3f9ebe029b50f818d088b1ef9666b1f19e90c85479c77bbdcde - md5: 9272dd3b19c4e8212f8542cefd5c3d67 + url: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h00291cd_2.conda + sha256: 624954bc08b3d7885a58c7d547282cfb9a201ce79b748b358f801de53e20f523 + md5: 2db0c38a7f2321c5bdaf32b181e832c7 depends: - - brotli-bin 1.1.0 h0dc2134_1 - - libbrotlidec 1.1.0 h0dc2134_1 - - libbrotlienc 1.1.0 h0dc2134_1 + - __osx >=10.13 + - brotli-bin 1.1.0 h00291cd_2 + - libbrotlidec 1.1.0 h00291cd_2 + - libbrotlienc 1.1.0 h00291cd_2 license: MIT license_family: MIT purls: [] - size: 19530 - timestamp: 1695990310168 + size: 19450 + timestamp: 1725267851605 - kind: conda name: brotli version: 1.1.0 - build: hcfcfb64_1 - build_number: 1 + build: h2466b09_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-hcfcfb64_1.conda - sha256: b927c95121c5f3d82fe084730281739fb04621afebf2d9f05711a0f42d27e326 - md5: f47f6db2528e38321fb00ae31674c133 + url: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-h2466b09_2.conda + sha256: d8fd7d1b446706776117d2dcad1c0289b9f5e1521cb13405173bad38568dd252 + md5: 378f1c9421775dfe644731cb121c8979 depends: - - brotli-bin 1.1.0 hcfcfb64_1 - - libbrotlidec 1.1.0 hcfcfb64_1 - - libbrotlienc 1.1.0 hcfcfb64_1 + - brotli-bin 1.1.0 h2466b09_2 + - libbrotlidec 1.1.0 h2466b09_2 + - libbrotlienc 1.1.0 h2466b09_2 - 
ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 19772 - timestamp: 1695990547936 + size: 19697 + timestamp: 1725268293988 - kind: conda name: brotli version: 1.1.0 - build: hd590300_1 - build_number: 1 + build: hb9d3cd8_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda - sha256: f2d918d351edd06c55a6c2d84b488fe392f85ea018ff227daac07db22b408f6b - md5: f27a24d46e3ea7b70a1f98e50c62508f + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda + sha256: fcb0b5b28ba7492093e54f3184435144e074dfceab27ac8e6a9457e736565b0b + md5: 98514fe74548d768907ce7a13f680e8f depends: - - brotli-bin 1.1.0 hd590300_1 - - libbrotlidec 1.1.0 hd590300_1 - - libbrotlienc 1.1.0 hd590300_1 - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - brotli-bin 1.1.0 hb9d3cd8_2 + - libbrotlidec 1.1.0 hb9d3cd8_2 + - libbrotlienc 1.1.0 hb9d3cd8_2 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 19383 - timestamp: 1695990069230 + size: 19264 + timestamp: 1725267697072 - kind: conda name: brotli-bin version: 1.1.0 - build: h0dc2134_1 - build_number: 1 + build: h00291cd_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h0dc2134_1.conda - sha256: 7ca3cfb4c5df314ed481301335387ab2b2ee651e2c74fbb15bacc795c664a5f1 - md5: ece565c215adcc47fc1db4e651ee094b + url: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h00291cd_2.conda + sha256: 642a8492491109fd8270c1e2c33b18126712df0cedb94aaa2b1c6b02505a4bfa + md5: 049933ecbf552479a12c7917f0a4ce59 depends: - - libbrotlidec 1.1.0 h0dc2134_1 - - libbrotlienc 1.1.0 h0dc2134_1 + - __osx >=10.13 + - libbrotlidec 1.1.0 h00291cd_2 + - libbrotlienc 1.1.0 h00291cd_2 license: MIT license_family: MIT purls: [] - size: 16660 - timestamp: 1695990286737 + size: 16643 + timestamp: 1725267837325 - kind: conda name: brotli-bin version: 1.1.0 - 
build: hcfcfb64_1 - build_number: 1 + build: h2466b09_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-hcfcfb64_1.conda - sha256: 4fbcb8f94acc97b2b04adbc64e304acd7c06fa0cf01953527bddae46091cc942 - md5: 0105229d7c5fabaa840043a86c10ec64 + url: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda + sha256: f3bf2893613540ac256c68f211861c4de618d96291719e32178d894114ac2bc2 + md5: d22534a9be5771fc58eb7564947f669d depends: - - libbrotlidec 1.1.0 hcfcfb64_1 - - libbrotlienc 1.1.0 hcfcfb64_1 + - libbrotlidec 1.1.0 h2466b09_2 + - libbrotlienc 1.1.0 h2466b09_2 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 20885 - timestamp: 1695990517506 + size: 20837 + timestamp: 1725268270219 - kind: conda name: brotli-bin version: 1.1.0 - build: hd590300_1 - build_number: 1 + build: hb9d3cd8_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda - sha256: a641abfbaec54f454c8434061fffa7fdaa9c695e8a5a400ed96b4f07c0c00677 - md5: 39f910d205726805a958da408ca194ba + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda + sha256: 261364d7445513b9a4debc345650fad13c627029bfc800655a266bf1e375bc65 + md5: c63b5e52939e795ba8d26e35d767a843 depends: - - libbrotlidec 1.1.0 hd590300_1 - - libbrotlienc 1.1.0 hd590300_1 - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libbrotlidec 1.1.0 hb9d3cd8_2 + - libbrotlienc 1.1.0 hb9d3cd8_2 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 18980 - timestamp: 1695990054140 + size: 18881 + timestamp: 1725267688731 - kind: conda name: brotli-python version: 1.1.0 - build: py312h30efb56_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h30efb56_1.conda - sha256: b68706698b6ac0d31196a8bcb061f0d1f35264bcd967ea45e03e108149a74c6f - md5: 
45801a89533d3336a365284d93298e36 + build: py312h275cf98_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda + sha256: f83baa6f6bcba7b73f6921d5c1aa95ffc5d8b246ade933ade79250de0a4c9c4c + md5: a99aec1ac46794a5fb1cd3cf5d2b6110 depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - libbrotlicommon 1.1.0 hd590300_1 + - libbrotlicommon 1.1.0 h2466b09_2 license: MIT license_family: MIT purls: - pkg:pypi/brotli?source=hash-mapping - size: 350604 - timestamp: 1695990206327 + size: 321874 + timestamp: 1725268491976 - kind: conda name: brotli-python version: 1.1.0 - build: py312h53d5487_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h53d5487_1.conda - sha256: 769e276ecdebf86f097786cbde1ebd11e018cd6cd838800995954fe6360e0797 - md5: d01a6667b99f0e8ad4097af66c938e62 + build: py312h2ec8cdc_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda + sha256: f2a59ccd20b4816dea9a2a5cb917eb69728271dbf1aeab4e1b7e609330a50b6f + md5: b0b867af6fc74b2a0aa206da29c0f3cf depends: - - python >=3.12.0rc3,<3.13.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - - libbrotlicommon 1.1.0 hcfcfb64_1 + - libbrotlicommon 1.1.0 hb9d3cd8_2 license: MIT license_family: MIT purls: - pkg:pypi/brotli?source=hash-mapping - size: 322514 - timestamp: 1695991054894 + size: 349867 + timestamp: 1725267732089 - kind: conda name: brotli-python version: 1.1.0 - build: py312heafc425_1 - build_number: 1 + build: py312h5861a67_2 + build_number: 2 subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312heafc425_1.conda - sha256: fc55988f9bc05a938ea4b8c20d6545bed6e9c6c10aa5147695f981136ca894c1 - md5: a288b88f06b8bfe0dedaf5c4b6ac6b7a + url: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312h5861a67_2.conda + sha256: 265764ff4ad9e5cfefe7ea85c53d95157bf16ac2c0e5f190c528e4c9c0c1e2d0 + md5: b95025822e43128835826ec0cc45a551 depends: - - libcxx >=15.0.7 - - python >=3.12.0rc3,<3.13.0a0 + - __osx >=10.13 + - libcxx >=17 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 constrains: - - libbrotlicommon 1.1.0 h0dc2134_1 + - libbrotlicommon 1.1.0 h00291cd_2 license: MIT license_family: MIT purls: - pkg:pypi/brotli?source=hash-mapping - size: 366883 - timestamp: 1695990710194 + size: 363178 + timestamp: 1725267893889 - kind: conda name: bzip2 version: 1.0.8 @@ -4314,12 +4349,12 @@ packages: timestamp: 1720974491916 - kind: conda name: c-ares - version: 1.33.0 + version: 1.33.1 build: h2466b09_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.0-h2466b09_0.conda - sha256: b3f07bc0134a921fee0d3b1306751051da3c1d19eb82b1ae6e14c1f15bcda9c3 - md5: 0864e040b671709d2838790522a8b976 + url: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.1-h2466b09_0.conda + sha256: 2cc89d816e39c7a8afdb0bdb46c3c8558ab3e174397be3300112159758736919 + md5: 8415a266788fd249f5e137487db796b0 depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -4327,75 +4362,75 @@ packages: license: MIT license_family: MIT purls: [] - size: 166933 - timestamp: 1723535152991 + size: 166630 + timestamp: 1724438651925 - kind: conda name: c-ares - version: 1.33.0 - build: h51dda26_0 + version: 1.33.1 + build: h44e7173_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.0-h51dda26_0.conda - sha256: d1f2429bf3d5d1c7e1a0ce5bf6216b563024169293731a130f7d8a64230b9302 - md5: 3355b2350a1de63943bcd053a4fccd6d + url: 
https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda + sha256: 98b0ac09472e6737fc4685147d1755028cc650d428369cbe3cb74ab38b327095 + md5: b31a2de5edfddb308dda802eab2956dc depends: - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 163061 - timestamp: 1723534676956 + size: 163203 + timestamp: 1724438157472 - kind: conda name: c-ares - version: 1.33.0 - build: ha66036c_0 + version: 1.33.1 + build: heb4867d_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.0-ha66036c_0.conda - sha256: 3dec5fdb5d1e1758510af0ca163d82ea10109fec8af7d0cd7af38f01068c365b - md5: b6927f788e85267beef6cbb292aaebdd + url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda + sha256: 2cb24f613eaf2850b1a08f28f967b10d8bd44ef623efa0154dc45eb718776be6 + md5: 0d3c60291342c0c025db231353376dfb depends: - __glibc >=2.28,<3.0.a0 - - libgcc-ng >=12 + - libgcc-ng >=13 license: MIT license_family: MIT purls: [] - size: 181873 - timestamp: 1723534591118 + size: 182796 + timestamp: 1724438109690 - kind: conda name: ca-certificates - version: 2024.7.4 + version: 2024.8.30 build: h56e8100_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.7.4-h56e8100_0.conda - sha256: 7f37bb33c7954de1b4d19ad622859feb4f6c58f751c38b895524cad4e44af72e - md5: 9caa97c9504072cd060cf0a3142cc0ed + url: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda + sha256: 0fcac3a7ffcc556649e034a1802aedf795e64227eaa7194d207b01eaf26454c4 + md5: 4c4fd67c18619be5aa65dc5b6c72e490 license: ISC purls: [] - size: 154943 - timestamp: 1720077592592 + size: 158773 + timestamp: 1725019107649 - kind: conda name: ca-certificates - version: 2024.7.4 + version: 2024.8.30 build: h8857fd0_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda - sha256: d16f46c489cb3192305c7d25b795333c5fc17bb0986de20598ed519f8c9cc9e4 - md5: 
7df874a4b05b2d2b82826190170eaa0f + url: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda + sha256: 593f302d0f44c2c771e1614ee6d56fffdc7d616e6f187669c8b0e34ffce3e1ae + md5: b7e5424e7f06547a903d28e4651dbb21 license: ISC purls: [] - size: 154473 - timestamp: 1720077510541 + size: 158665 + timestamp: 1725019059295 - kind: conda name: ca-certificates - version: 2024.7.4 + version: 2024.8.30 build: hbcca054_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - sha256: c1548a3235376f464f9931850b64b02492f379b2f2bb98bc786055329b080446 - md5: 23ab7665c5f63cfb9f1f6195256daac6 + url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda + sha256: afee721baa6d988e27fef1832f68d6f32ac8cc99cdf6015732224c2841a09cea + md5: c27d1c142233b5bc9ca570c6e2e0c244 license: ISC purls: [] - size: 154853 - timestamp: 1720077432978 + size: 159003 + timestamp: 1725018903918 - kind: conda name: cached-property version: 1.5.2 @@ -4445,7 +4480,7 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/cachetools?source=compressed-mapping + - pkg:pypi/cachetools?source=hash-mapping size: 14727 timestamp: 1724028288793 - kind: conda @@ -4533,49 +4568,49 @@ packages: timestamp: 1721138900054 - kind: conda name: certifi - version: 2024.7.4 + version: 2024.8.30 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda - sha256: dd3577bb5275062c388c46b075dcb795f47f8dac561da7dd35fe504b936934e5 - md5: 24e7fd6ca65997938fff9e5ab6f653e4 + url: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda + sha256: 7020770df338c45ac6b560185956c32f0a5abf4b76179c037f115fc7d687819f + md5: 12f7d00853807b0531775e9be891cb11 depends: - python >=3.7 license: ISC purls: - pkg:pypi/certifi?source=hash-mapping - size: 159308 - timestamp: 1720458053074 + size: 163752 + 
timestamp: 1725278204397 - kind: conda name: cffi - version: 1.17.0 - build: py312h1671c18_0 + version: 1.17.1 + build: py312h06ac9bb_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.0-py312h1671c18_0.conda - sha256: 20fe2f88dd7c0ef16e464fa46757821cf569bc71f40a832e7767d3a87250f251 - md5: 33dee889f41b0ba6dbe5ddbe70ebf263 + url: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda + sha256: cba6ea83c4b0b4f5b5dc59cb19830519b28f95d7ebef7c9c5cf1c14843621457 + md5: a861504bbea4161a9170b85d4d2be840 depends: - __glibc >=2.17,<3.0.a0 - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 + - libgcc >=13 - pycparser - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - - pkg:pypi/cffi?source=compressed-mapping - size: 294192 - timestamp: 1723018486671 + - pkg:pypi/cffi?source=hash-mapping + size: 294403 + timestamp: 1725560714366 - kind: conda name: cffi - version: 1.17.0 + version: 1.17.1 build: py312h4389bb4_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.0-py312h4389bb4_0.conda - sha256: f6a2968ca5e7c1dabc2a686b287fb3bcd2a6a60afa748dc0fde85f8d3954e4da - md5: 7373b6b2f20c32e8bc0a5ac283355f3a + url: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda + sha256: ac007bf5fd56d13e16d95eea036433012f2e079dc015505c8a79efebbad1fcbc + md5: 08310c1a22ef957d537e547f8d484f92 depends: - pycparser - python >=3.12,<3.13.0a0 @@ -4586,17 +4621,17 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/cffi?source=compressed-mapping - size: 289216 - timestamp: 1723018797374 + - pkg:pypi/cffi?source=hash-mapping + size: 288142 + timestamp: 1725560896359 - kind: conda name: cffi - version: 1.17.0 - build: py312h9620c06_0 + version: 1.17.1 + build: py312hf857d28_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.0-py312h9620c06_0.conda - sha256: 
a9ae28779a4ef1b38dd8cedf7f0b4068e75c6388c46214b8ea6431acca1486d2 - md5: a928b653a0cd74e27b6ae52fc2b6be0a + url: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.1-py312hf857d28_0.conda + sha256: 94fe49aed25d84997e2630d6e776a75ee2a85bd64f258702c57faa4fe2986902 + md5: 5bbc69b8194fedc2792e451026cac34f depends: - __osx >=10.13 - libffi >=3.4,<4.0a0 @@ -4606,9 +4641,9 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/cffi?source=compressed-mapping - size: 281831 - timestamp: 1723018702244 + - pkg:pypi/cffi?source=hash-mapping + size: 282425 + timestamp: 1725560725144 - kind: conda name: cfgv version: 3.3.1 @@ -4828,13 +4863,13 @@ packages: timestamp: 1710320435158 - kind: conda name: contextily - version: 1.6.1 + version: 1.6.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.1-pyhd8ed1ab_0.conda - sha256: e721843c13ffbdab3b22ad9c90479318117a29e830b33461bbf1ec01413e85a6 - md5: 85b4886eca5c18c89e10a2c1fb4af7dc + url: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda + sha256: 8ea64368876654f4fe4ad6d39fd8df51462080e9647f8b9d42f5949600640c96 + md5: 98e67488fd805cddb232a7246c915b4f depends: - geopy - joblib @@ -4848,118 +4883,126 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/contextily?source=compressed-mapping - size: 20535 - timestamp: 1723612205680 + - pkg:pypi/contextily?source=hash-mapping + size: 20674 + timestamp: 1725373775230 - kind: conda name: contourpy - version: 1.2.1 - build: py312h0d7def4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.2.1-py312h0d7def4_0.conda - sha256: 3af3de9a099d9ab88d24d0956c3acb838a774b64e52afa25abeed7b31c1174ef - md5: bc0160f16ae02e18de578eaddadd4f61 + version: 1.3.0 + build: py312h68727a3_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_1.conda + sha256: 
e459bc2d05fabfffcf9bf1f3725e36a5ef64ae7f0b5af312eeaed2e0519e22c8 + md5: 6b9f9141c247bdd61a2d6d37e0a8b530 depends: - - numpy >=1.20 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + - numpy >=1.23 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/contourpy?source=hash-mapping - size: 206433 - timestamp: 1712430299728 + size: 272322 + timestamp: 1725378526351 - kind: conda name: contourpy - version: 1.2.1 - build: py312h8572e83_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.1-py312h8572e83_0.conda - sha256: b0731336b9788c247b11a592352f700a647119340b549aba9e933835c7c77df0 - md5: 12c6a831ef734f0b2dd4caff514cbb7f + version: 1.3.0 + build: py312hc5c4d5f_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_1.conda + sha256: 9ccd546024da0a3d2695e21d780f75745b3427047dc073ce88ef545f5bf2e3df + md5: 68996da74a346963430ace9984d627b4 depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - numpy >=1.20 + - __osx >=10.13 + - libcxx >=17 + - numpy >=1.23 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/contourpy?source=hash-mapping - size: 256764 - timestamp: 1712430146809 + size: 256566 + timestamp: 1725378501399 - kind: conda name: contourpy - version: 1.2.1 - build: py312h9230928_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.2.1-py312h9230928_0.conda - sha256: 3879ed298cc9ec5486d13b7d65da960c813925837fe67fc385c9b31f7eefddc0 - md5: 079df34ce7c71259cfdd394645370891 + version: 1.3.0 + build: py312hd5eb7cc_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_1.conda + sha256: 557d32fd30108c4fd44fba60621e30519c1fcf6a361cfd8bef1f3e3eac51eb99 + md5: 
1e7201bef33d1d3da3bf95bf0c273879 depends: - - libcxx >=16 - - numpy >=1.20 + - numpy >=1.23 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/contourpy?source=hash-mapping - size: 248928 - timestamp: 1712430234380 + size: 214543 + timestamp: 1725378879919 - kind: conda name: coverage version: 7.6.1 - build: py312h41a817b_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h41a817b_0.conda - sha256: b23db9d9e92ff1e39957eb803c1e6b90540683169714090bb7154f4dedd6d62b - md5: 4006636c39312dc42f8504475be3800f + build: py312h4389bb4_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_1.conda + sha256: cca6398754855d8ffa8412b58a4439f0f183013ae730962ef9cc8150525f3871 + md5: 49b4e0600c84e7d53aae4c042f1e2e4a depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tomli + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: - - pkg:pypi/coverage?source=compressed-mapping - size: 364065 - timestamp: 1722822061660 + - pkg:pypi/coverage?source=hash-mapping + size: 388697 + timestamp: 1724954338520 - kind: conda name: coverage version: 7.6.1 - build: py312h4389bb4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_0.conda - sha256: 8d6aa2188908c3532e4b6582a84db46cb752a39a38e35f1a22adb3eadb48f395 - md5: 85da498a4dca8bd7b9434767b6fc6027 + build: py312h66e93f0_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h66e93f0_1.conda + sha256: 1ad422ed302e3630b26e23238bd1d047674b153c4f0a99e7773faa591aa7eab9 + md5: 5dc6e358ee0af388564bd0eba635cf9e depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python 
>=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tomli - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: - - pkg:pypi/coverage?source=compressed-mapping - size: 390609 - timestamp: 1722822534339 + - pkg:pypi/coverage?source=hash-mapping + size: 363627 + timestamp: 1724953903049 - kind: conda name: coverage version: 7.6.1 - build: py312hbd25219_0 + build: py312hb553811_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hbd25219_0.conda - sha256: 30ebe47fad4de2ab3af3b0a5f2fa29230953d4c0e46198cf4104158f225a518d - md5: 17ee8821c9b8cd8f7ae752f4a57fbf56 + url: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hb553811_1.conda + sha256: fd0f5c84ef943618b378592e74010831a7962127e2759ea75437117ad3f00eee + md5: 49f066bb9337fd34a4c9c09f576ce136 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -4968,9 +5011,9 @@ packages: license: Apache-2.0 license_family: APACHE purls: - - pkg:pypi/coverage?source=compressed-mapping - size: 363125 - timestamp: 1722822122292 + - pkg:pypi/coverage?source=hash-mapping + size: 362574 + timestamp: 1724954071768 - kind: conda name: cycler version: 0.12.1 @@ -5090,19 +5133,19 @@ packages: timestamp: 1683598364427 - kind: conda name: dask - version: 2024.8.1 + version: 2024.8.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.1-pyhd8ed1ab_0.conda - sha256: 1d6a54316fc58cb3be63750dd9144087d2b6e9d3ee35a8077f25d6f2fd87cadd - md5: 95277bf15c984015cb76f85a629d622e + url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda + sha256: 6afd548c338bb418d9645081cbe49b93ffa70f0fb74d9c3c4ed7defd910178ea + md5: 3adbad9b363bd0163ef2ac59f095cc13 depends: - bokeh >=2.4.2,!=3.0.* - cytoolz >=0.11.0 - - dask-core >=2024.8.1,<2024.8.2.0a0 + - dask-core >=2024.8.2,<2024.8.3.0a0 - dask-expr >=1.1,<1.2 - - distributed 
>=2024.8.1,<2024.8.2.0a0 + - distributed >=2024.8.2,<2024.8.3.0a0 - jinja2 >=2.10.3 - lz4 >=4.3.2 - numpy >=1.21 @@ -5114,19 +5157,18 @@ packages: - openssl !=1.1.1e license: BSD-3-Clause license_family: BSD - purls: - - pkg:pypi/dask?source=compressed-mapping - size: 7359 - timestamp: 1723855241340 + purls: [] + size: 7417 + timestamp: 1725064395582 - kind: conda name: dask-core - version: 2024.8.1 + version: 2024.8.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.1-pyhd8ed1ab_0.conda - sha256: 12aedcd752ae4113c348bf3b8dc1ebdc4aa59d9d2741f3b32a62214d490fe985 - md5: 8fe3858b19843234b331d8459db3a7a1 + url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda + sha256: 1c1b86b719262a7d557327f5c1e363e7039a4078c42270a19dcd9af42fe1404f + md5: 8e7524a2fb561506260db789806c7ee9 depends: - click >=8.1 - cloudpickle >=3.0.0 @@ -5140,29 +5182,29 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/dask?source=compressed-mapping - size: 884957 - timestamp: 1723847729382 + - pkg:pypi/dask?source=hash-mapping + size: 888258 + timestamp: 1725051212771 - kind: conda name: dask-expr - version: 1.1.11 + version: 1.1.13 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.11-pyhd8ed1ab_0.conda - sha256: 60269fbb234fa2eab8b919b607b4514abb60f30d206552e27c9043367a1dfb4a - md5: e66672d843c0bfc65f2e4f9badaf6ba9 + url: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda + sha256: e1b570064d24e85278c53c87e4e361e60fb01a156ce026eac310ff9dcbd85111 + md5: b77166a6032a2b8e52b3fee90d62ea4d depends: - - dask-core 2024.8.1 + - dask-core 2024.8.2 - pandas >=2 - pyarrow - python >=3.10 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/dask-expr?source=compressed-mapping - size: 185254 - timestamp: 1723851662768 + - pkg:pypi/dask-expr?source=hash-mapping + size: 185183 + 
timestamp: 1725321008333 - kind: conda name: dbus version: 1.13.6 @@ -5184,11 +5226,12 @@ packages: - kind: conda name: debugpy version: 1.8.5 - build: py312h275cf98_0 + build: py312h275cf98_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_0.conda - sha256: 1a77f31f4909f2455aece546d8ef0730cd4b5f08c525c88eafeadd2f60457d44 - md5: 5341f925d61f8c5ca7fcb71c06f89edc + url: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_1.conda + sha256: 44403893fe8d5c2b3416d8377fce34f04b3cb8f4dc79e19161b024cde6814df3 + md5: 51b54280745ac5573ed0937c71c0e514 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -5198,48 +5241,50 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/debugpy?source=compressed-mapping - size: 3099514 - timestamp: 1722924359547 + - pkg:pypi/debugpy?source=hash-mapping + size: 3174333 + timestamp: 1725269561740 - kind: conda name: debugpy version: 1.8.5 - build: py312h28f332c_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h28f332c_0.conda - sha256: f7edf4c79176e84f187435c88ea9afced2a1381021769a29e891c436a7a1af83 - md5: 7de5f5df99688c0616b26a942b2a8161 + build: py312h2ec8cdc_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312h2ec8cdc_1.conda + sha256: 63b027e5605955d22d6bd491316c81876363bce36c7b5fea006a664337d77686 + md5: f89b813bd9fe5ae6e3b7d17e17801f68 depends: - - __osx >=10.13 - - libcxx >=16 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - - pkg:pypi/debugpy?source=compressed-mapping - size: 2076737 - timestamp: 1722924083810 + - pkg:pypi/debugpy?source=hash-mapping + size: 2085616 + timestamp: 1725269284102 - kind: conda name: debugpy version: 1.8.5 - build: py312hca68cad_0 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312hca68cad_0.conda - sha256: 129964de45b48cb44a377ba926fd96a081ef11ca3d47f5f1b969c2609de30816 - md5: 6c56579c537feaafdf62d6c3b5424c53 + build: py312h5861a67_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h5861a67_1.conda + sha256: 5ad4567872a0aa9f0dace65d9f6b4315f452df7d238bec6a4482c5527b7762fc + md5: 87fcafa1ac8e06b6acd5ee95632adf87 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - __osx >=10.13 + - libcxx >=17 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - - pkg:pypi/debugpy?source=compressed-mapping - size: 2092033 - timestamp: 1722923858548 + - pkg:pypi/debugpy?source=hash-mapping + size: 2059098 + timestamp: 1725269547461 - kind: conda name: decorator version: 5.1.1 @@ -5390,18 +5435,18 @@ packages: timestamp: 1702383349284 - kind: conda name: distributed - version: 2024.8.1 + version: 2024.8.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.1-pyhd8ed1ab_0.conda - sha256: 23410253f3226cf29a69a07cd4148ec2ef520c62b36ea60a2c51e4154b1ae3b4 - md5: 5e5a5b4d85a972250b52cb54452085fd + url: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda + sha256: b0eb013bc9fa6d88424ec7bf2a9fb82448d2457edacccc798dea5ef760a6ef01 + md5: 44d22b5d98a219a4c35cafe9bf3b9ce2 depends: - click >=8.0 - - cloudpickle >=2.0.0 + - cloudpickle >=3.0.0 - cytoolz >=0.11.2 - - dask-core >=2024.8.1,<2024.8.2.0a0 + - dask-core >=2024.8.2,<2024.8.3.0a0 - jinja2 >=2.10.3 - locket >=1.0.0 - msgpack-python >=1.0.2 @@ -5420,9 +5465,9 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/distributed?source=compressed-mapping - size: 801253 - timestamp: 1723851507373 + - pkg:pypi/distributed?source=hash-mapping + size: 798375 + timestamp: 1725058359740 - kind: conda name: 
double-conversion version: 3.3.0 @@ -5566,178 +5611,174 @@ packages: timestamp: 1712591929944 - kind: conda name: executing - version: 2.0.1 + version: 2.1.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda - sha256: c738804ab1e6376f8ea63372229a04c8d658dc90fd5a218c6273a2eaf02f4057 - md5: e16be50e378d8a4533b989035b196ab8 + url: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + sha256: a52d7516e2e11d3eb10908e10d3eb3f8ef267fea99ed9b09d52d96c4db3441b8 + md5: d0441db20c827c11721889a241df1220 depends: - python >=2.7 license: MIT license_family: MIT purls: - pkg:pypi/executing?source=hash-mapping - size: 27689 - timestamp: 1698580072627 + size: 28337 + timestamp: 1725214501850 - kind: conda name: expat - version: 2.6.2 - build: h59595ed_0 + version: 2.6.3 + build: h5888daf_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.2-h59595ed_0.conda - sha256: 89916c536ae5b85bb8bf0cfa27d751e274ea0911f04e4a928744735c14ef5155 - md5: 53fb86322bdb89496d7579fe3f02fd61 + url: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda + sha256: 65bd479c75ce876f26600cb230d6ebc474086e31fa384af9b4282b36842ed7e2 + md5: 6595440079bed734b113de44ffd3cd0a depends: - - libexpat 2.6.2 h59595ed_0 - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libexpat 2.6.3 h5888daf_0 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 137627 - timestamp: 1710362144873 + size: 137891 + timestamp: 1725568750673 - kind: conda name: expat - version: 2.6.2 - build: h63175ca_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.2-h63175ca_0.conda - sha256: f5a13d4bc591a4dc210954f492dd59a0ecf9b9d2ab28bf2ece755ca8f69ec1b4 - md5: 52f9dec6758ceb8ce0ea8af9fa13eb1a + version: 2.6.3 + build: hac325c4_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.3-hac325c4_0.conda + sha256: 
79b0da6ca997f7a939bfb9631356afbc519343944fc81cc4261c6b3a85f6db32 + md5: 474cd8746e9f896fc5ae84af3c951796 depends: - - libexpat 2.6.2 h63175ca_0 + - __osx >=10.13 + - libexpat 2.6.3 hac325c4_0 license: MIT license_family: MIT purls: [] - size: 229627 - timestamp: 1710362661692 + size: 128253 + timestamp: 1725568880679 - kind: conda name: expat - version: 2.6.2 - build: h73e2aa4_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.2-h73e2aa4_0.conda - sha256: 0fd1befb18d9d937358a90d5b8f97ac2402761e9d4295779cbad9d7adfb47976 - md5: dc0882915da2ec74696ad87aa2350f27 + version: 2.6.3 + build: he0c23c2_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.3-he0c23c2_0.conda + sha256: 627651a36fe659ce08d79e8bcad00dc5fc35c6e63eb51e5d15a30a7605251998 + md5: a85588222941f75577eb39711058e1de depends: - - libexpat 2.6.2 h73e2aa4_0 + - libexpat 2.6.3 he0c23c2_0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 126612 - timestamp: 1710362607162 + size: 230615 + timestamp: 1725569133557 - kind: conda name: filelock - version: 3.15.4 + version: 3.16.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda - sha256: f78d9c0be189a77cb0c67d02f33005f71b89037a85531996583fb79ff3fe1a0a - md5: 0e7e4388e9d5283e22b35a9443bdbcc9 + url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda + sha256: f55c9af3d92a363fa9e4f164038db85a028befb65d56df0b2cb34911eba8a37a + md5: ec288789b07ae3be555046e099798a56 depends: - python >=3.7 license: Unlicense purls: - pkg:pypi/filelock?source=hash-mapping - size: 17592 - timestamp: 1719088395353 + size: 17402 + timestamp: 1725740654220 - kind: conda name: fiona - version: 1.9.6 - build: py312h6629543_4 - build_number: 4 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/fiona-1.9.6-py312h6629543_4.conda 
- sha256: 66c0bc1a789ce6dda66d9e2d722a69e582605f3cf2e85ff5e25f250c661246e1 - md5: b27fde46540dc911edac59b7fa24a85a + version: 1.10.0 + build: py312h5aa26c2_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.0-py312h5aa26c2_1.conda + sha256: 8ecbc114d35f1bea4f90107d82773c217c1d1111db51c82af5c215a28f4ae927 + md5: dcff3350fc47354d59e05760db669e7f depends: + - __glibc >=2.17,<3.0.a0 - attrs >=19.2.0 - - certifi - click >=8.0,<9.dev0 - click-plugins >=1.0 - cligj >=0.5 - gdal - - libgdal >=3.9.1,<3.10.0a0 - - libgdal-core >=3.9.1,<3.10.0a0 - - numpy >=1.19,<3 + - libgcc >=13 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 + - libstdcxx >=13 + - pyparsing - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - shapely - - six - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause - license_family: BSD purls: - - pkg:pypi/fiona?source=compressed-mapping - size: 826120 - timestamp: 1722412662780 + - pkg:pypi/fiona?source=hash-mapping + size: 1175161 + timestamp: 1726075405189 - kind: conda name: fiona - version: 1.9.6 - build: py312h96884de_4 - build_number: 4 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.6-py312h96884de_4.conda - sha256: 3ce5730f8cf5a5398ccddcc5c6b68e00d174085d3cd723922857f53541994640 - md5: 542ea7cc15e4d98fcab0bdb789dc1c84 + version: 1.10.0 + build: py312hcd3578f_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.0-py312hcd3578f_1.conda + sha256: 9525e0bbc4e3ead0cde0475bd9cf43e5a03ee5a7dd0b1f6f90b300bfd36fd3f5 + md5: 36140048bbc860971d60f6ecc20d2596 depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - attrs >=19.2.0 - - certifi - click >=8.0,<9.dev0 - click-plugins >=1.0 - cligj >=0.5 - gdal - - libgcc-ng >=12 - - libgdal >=3.9.1,<3.10.0a0 - - libgdal-core >=3.9.1,<3.10.0a0 - - libstdcxx-ng >=12 - - numpy >=1.19,<3 + - libcxx >=17 + - libgdal 
>=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 + - pyparsing - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - shapely - - six license: BSD-3-Clause - license_family: BSD purls: - - pkg:pypi/fiona?source=compressed-mapping - size: 981001 - timestamp: 1722411784011 + - pkg:pypi/fiona?source=hash-mapping + size: 1024770 + timestamp: 1726075810757 - kind: conda name: fiona - version: 1.9.6 - build: py312hfc836c0_4 - build_number: 4 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.9.6-py312hfc836c0_4.conda - sha256: 9e85d7c0ae0394f2183439c51aed332d872bed95a82ed9df4c2a13cd9ac62657 - md5: d4ac703308d916395d2d48b0ab21de4b + version: 1.10.0 + build: py312hd215820_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.0-py312hd215820_1.conda + sha256: adc71ebb2b15cf59e4174af2469109a084a7c8e1d41fa98a7aa5bd4c00d77896 + md5: 8b7f1e27c03057f3da5cc200f36051bc depends: - - __osx >=10.13 - attrs >=19.2.0 - - certifi - click >=8.0,<9.dev0 - click-plugins >=1.0 - cligj >=0.5 - gdal - - libcxx >=16 - - libgdal >=3.9.1,<3.10.0a0 - - libgdal-core >=3.9.1,<3.10.0a0 - - numpy >=1.19,<3 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 + - pyparsing - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - shapely - - six + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause - license_family: BSD purls: - - pkg:pypi/fiona?source=compressed-mapping - size: 857936 - timestamp: 1722411798691 + - pkg:pypi/fiona?source=hash-mapping + size: 975399 + timestamp: 1726075777728 - kind: conda name: fmt version: 11.0.2 @@ -5962,54 +6003,57 @@ packages: - kind: conda name: fonttools version: 4.53.1 - build: py312h41a817b_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h41a817b_0.conda - sha256: b1d9f95c13b9caa26689875b0af654b7f464e273eea94abdf5b1be1baa6c8870 - md5: da921c56bcf69a8b97216ecec0cc4015 + build: 
py312h4389bb4_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_1.conda + sha256: 0f0300e6c6053d9f16844af06c60650c59e20f1e4b1a944bdf0b23377fb2f616 + md5: 6663e0f27c39d39504617e4fe4da3bf6 depends: - - __glibc >=2.17,<3.0.a0 - brotli - - libgcc-ng >=12 - munkres - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2847552 - timestamp: 1720359185195 + size: 2406661 + timestamp: 1725391814010 - kind: conda name: fonttools version: 4.53.1 - build: py312h4389bb4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_0.conda - sha256: 508b8443a382eec4a6c389e0ab43543797a99172982d9999df8972bfa42e2829 - md5: d1d90dc02033f12ab8020dbb653a9fc8 + build: py312h66e93f0_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h66e93f0_1.conda + sha256: 19e4bc017b219e02de712e948d48a23c8bb98dabe741c807949c7fb48abe71d8 + md5: 7abb7d39d482ac3b8e27e6c0fff3b168 depends: + - __glibc >=2.17,<3.0.a0 - brotli + - libgcc >=13 - munkres - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2412400 - timestamp: 1720359443784 + size: 2797458 + timestamp: 1725391437161 - kind: conda name: fonttools version: 4.53.1 - build: py312hbd25219_0 + build: py312hb553811_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hbd25219_0.conda - sha256: bfb83e8a6e95e7d50880cd4811e2312e315d7e8b95b99a405f4056c3162e6ee2 - md5: 56b85d2b2f034ed31feaaa0b90c37b7f + url: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hb553811_1.conda + sha256: 
09f1d89bb6ecff8704162a5bd8c1309a978ad5e3a3a4dfe009ea22fb75d070cb + md5: df00a7504c74682d63ae89c32687a3a2 depends: - __osx >=10.13 - brotli @@ -6020,8 +6064,8 @@ packages: license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2714145 - timestamp: 1720359359694 + size: 2724931 + timestamp: 1725391478296 - kind: conda name: fqdn version: 1.5.1 @@ -6149,92 +6193,92 @@ packages: timestamp: 1694953013560 - kind: conda name: fsspec - version: 2024.6.1 + version: 2024.9.0 build: pyhff2d567_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda - sha256: 2b8e98294c70d9a33ee0ef27539a8a8752a26efeafa0225e85dc876ef5bb49f4 - md5: 996bf792cdb8c0ac38ff54b9fde56841 + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + sha256: 8f4e9805b4ec223dea0d99f9e7e57c391d9026455eb9f0d6e0784c5d1a1200dc + md5: ace4329fbff4c69ab0309db6da182987 depends: - python >=3.8 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/fsspec?source=hash-mapping - size: 133141 - timestamp: 1719515065535 + size: 134378 + timestamp: 1725543368393 - kind: conda name: gdal version: 3.9.2 - build: py312h16ac12d_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_0.conda - sha256: 731397cc0c20d0d6bfffbf4eef9452dc44c72112e37d45f85f0392dd9b65ff78 - md5: e1c0f151032531482eff718d1cc0f116 + build: py312h1299960_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_2.conda + sha256: edd3249b5c4d5fc84ac5aafe7a41f1a4e5d5ff2d474f37a7ce2484d50553b6a8 + md5: ffe1e203655d54e3d2e42de350c5785f depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - libgdal-core 3.9.2.* - libkml >=1.3.0,<1.4.0a0 + - libstdcxx >=13 - libxml2 >=2.12.7,<3.0a0 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT - 
license_family: MIT purls: - - pkg:pypi/gdal?source=compressed-mapping - size: 1631798 - timestamp: 1723840488189 + - pkg:pypi/gdal?source=hash-mapping + size: 1697617 + timestamp: 1726092649530 - kind: conda name: gdal version: 3.9.2 - build: py312h29648be_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h29648be_0.conda - sha256: baf58f017cf4a0ad17a51648e259631fd288996dd8563d2887a262cce5a0511c - md5: 64122edb42acc4ec068eccbb891b6e54 + build: py312h16ac12d_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_2.conda + sha256: b912c4856443b46ceeaf70aaecd831357331560abb87321f481943670dac4476 + md5: 5fe9072721d3fe55f3d8b016019ccadd depends: - - __osx >=10.13 - - libcxx >=16 - libgdal-core 3.9.2.* - libkml >=1.3.0,<1.4.0a0 - libxml2 >=2.12.7,<3.0a0 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: - - pkg:pypi/gdal?source=compressed-mapping - size: 1676335 - timestamp: 1723839692636 + - pkg:pypi/gdal?source=hash-mapping + size: 1637570 + timestamp: 1726094182525 - kind: conda name: gdal version: 3.9.2 - build: py312h7eda2e2_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h7eda2e2_0.conda - sha256: 7d4bc28c1e243e82ee07097de1184eec1208593006c325c7cbe807ceeca5740c - md5: 92dfbc773e70ce5a5728135378da5766 + build: py312h365dfcf_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_2.conda + sha256: 502bb52738130128f5dfe692dd8b206160c7a8c286c21f14a6ec689529a024d3 + md5: 640b75d6bebe65db4d09f999f7230f38 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - __osx >=10.13 + - libcxx >=17 - libgdal-core 3.9.2.* - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 - libxml2 >=2.12.7,<3.0a0 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - 
python_abi 3.12.* *_cp312 license: MIT - license_family: MIT purls: - - pkg:pypi/gdal?source=compressed-mapping - size: 1699202 - timestamp: 1723839179852 + - pkg:pypi/gdal?source=hash-mapping + size: 1679539 + timestamp: 1726092790963 - kind: conda name: geocube version: 0.5.2 @@ -6592,22 +6636,22 @@ packages: timestamp: 1711634622644 - kind: conda name: griffe - version: 1.1.0 + version: 1.3.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/griffe-1.1.0-pyhd8ed1ab_0.conda - sha256: 5a23282aa11b5b2e5df0202769a3654936d5f905a80779f3ac021e4074069616 - md5: da6d3a480566b6bbccc608c436db7696 + url: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda + sha256: 345891673e53b7cef21e2aea481475015bf3f93d5128a9531f87aa4b6fbd05f5 + md5: 9f55045a47cb6fffdc493c10a2b7f463 depends: - astunparse >=1.6 - colorama >=0.4 - python >=3.8 license: ISC purls: - - pkg:pypi/griffe?source=compressed-mapping - size: 95827 - timestamp: 1723945410234 + - pkg:pypi/griffe?source=hash-mapping + size: 97711 + timestamp: 1726136969053 - kind: conda name: h11 version: 0.14.0 @@ -6858,13 +6902,13 @@ packages: timestamp: 1711597091407 - kind: conda name: httpx - version: 0.27.0 + version: 0.27.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.0-pyhd8ed1ab_0.conda - sha256: fdaf341fb2630b7afe8238315448fc93947f77ebfa4da68bb349e1bcf820af58 - md5: 9f359af5a886fd6ca6b2b6ea02e58332 + url: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda + sha256: 1a33f160548bf447e15c0273899d27e4473f1d5b7ca1441232ec2d9d07c56d03 + md5: 7e9ac3faeebdbd7b53b462c41891e7f7 depends: - anyio - certifi @@ -6876,8 +6920,8 @@ packages: license_family: BSD purls: - pkg:pypi/httpx?source=hash-mapping - size: 64651 - timestamp: 1708531043505 + size: 65085 + timestamp: 1724778453275 - kind: pypi name: hydamo version: 0.1.0 @@ -6974,30 +7018,30 @@ packages: timestamp: 
1720413864262 - kind: conda name: idna - version: '3.7' + version: '3.8' build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - sha256: 9687ee909ed46169395d4f99a0ee94b80a52f87bed69cd454bb6d37ffeb0ec7b - md5: c0cc1420498b17414d8617d0b9f506ca + url: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + sha256: 8660d38b272d3713ec8ac5ae918bc3bc80e1b81e1a7d61df554bded71ada6110 + md5: 99e164522f6bdf23c177c8d9ae63f975 depends: - python >=3.6 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/idna?source=hash-mapping - size: 52718 - timestamp: 1713279497047 + size: 49275 + timestamp: 1724450633325 - kind: conda name: importlib-metadata - version: 8.2.0 + version: 8.5.0 build: pyha770c72_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.2.0-pyha770c72_0.conda - sha256: 15dd2beba1c6f780fec6c5351bbce815d27a29561f422fe830133c995ef90b8a - md5: c261d14fc7f49cdd403868998a18c318 + url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + sha256: 7194700ce1a5ad2621fd68e894dd8c1ceaff9a38723e6e0e5298fdef13017b1c + md5: 54198435fce4d64d8a89af22573012a8 depends: - python >=3.8 - zipp >=0.5 @@ -7005,60 +7049,59 @@ packages: license_family: APACHE purls: - pkg:pypi/importlib-metadata?source=hash-mapping - size: 28110 - timestamp: 1721856614564 + size: 28646 + timestamp: 1726082927916 - kind: conda name: importlib-resources - version: 6.4.3 + version: 6.4.5 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.3-pyhd8ed1ab_0.conda - sha256: 4611a7ef21e4bf1b7e67493aacdf69d37e9ca972d7e0f197627316e0790e168b - md5: b8fd70ef9ad7a171ce220f4bf3201883 + url: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + sha256: b5a63a3e2bc2c8d3e5978a6ef4efaf2d6b02803c1bce3c2eb42e238dd91afe0b + 
md5: 67f4772681cf86652f3e2261794cf045 depends: - - importlib_resources >=6.4.3,<6.4.4.0a0 + - importlib_resources >=6.4.5,<6.4.6.0a0 - python >=3.8 license: Apache-2.0 - purls: - - pkg:pypi/importlib-resources?source=compressed-mapping - size: 9521 - timestamp: 1724060191769 + purls: [] + size: 9595 + timestamp: 1725921472017 - kind: conda name: importlib_metadata - version: 8.2.0 + version: 8.5.0 build: hd8ed1ab_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.2.0-hd8ed1ab_0.conda - sha256: 4a0eacc41786d97176fb53c19d25c4f9b8ab4c9a0ee1fd6f09bc13ca197c21d9 - md5: 0fd030dce707a6654472cf7619b0b01b + url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + sha256: 313b8a05211bacd6b15ab2621cb73d7f41ea5c6cae98db53367d47833f03fef1 + md5: 2a92e152208121afadf85a5e1f3a5f4d depends: - - importlib-metadata >=8.2.0,<8.2.1.0a0 + - importlib-metadata >=8.5.0,<8.5.1.0a0 license: Apache-2.0 license_family: APACHE purls: [] - size: 9330 - timestamp: 1721856618848 + size: 9385 + timestamp: 1726082930346 - kind: conda name: importlib_resources - version: 6.4.3 + version: 6.4.5 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.3-pyhd8ed1ab_0.conda - sha256: 310ce095eada15a5cb5611c26e2c1caacd02f894452642b787db44ef86d668bf - md5: 82b36c572ecc0d42c612203769e19de5 + url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda + sha256: 2cb9db3e40033c3df72d3defc678a012840378fd55a67e4351363d4b321a0dc1 + md5: c808991d29b9838fb4d96ce8267ec9ec depends: - python >=3.8 - zipp >=3.1.0 constrains: - - importlib-resources >=6.4.3,<6.4.4.0a0 + - importlib-resources >=6.4.5,<6.4.6.0a0 license: Apache-2.0 purls: - pkg:pypi/importlib-resources?source=compressed-mapping - size: 32045 - timestamp: 1724060180208 + size: 32725 + timestamp: 1725921462405 - kind: conda name: iniconfig version: 2.0.0 @@ 
-7183,13 +7226,13 @@ packages: timestamp: 1719845667420 - kind: conda name: ipython - version: 8.26.0 + version: 8.27.0 build: pyh707e725_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh707e725_0.conda - sha256: a40c2859a055d98ba234d67b233fb1ba55d86cbe632ec96eecb7c5019c16478b - md5: f64d3520d5d00321c10f4dabb5b903f3 + url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda + sha256: 4eaa22b1afdbd0076ab1cc8da99d9c62f5c5f14cd0a30ff99c133e22f2db5a58 + md5: 0ed09f0c0f62f50b4b7dd2744af13629 depends: - __unix - decorator @@ -7208,17 +7251,17 @@ packages: license_family: BSD purls: - pkg:pypi/ipython?source=hash-mapping - size: 599279 - timestamp: 1719582627972 + size: 598878 + timestamp: 1725050237172 - kind: conda name: ipython - version: 8.26.0 + version: 8.27.0 build: pyh7428d3b_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.26.0-pyh7428d3b_0.conda - sha256: b0fd9f89ef87c4b968ae8aae01c4ff8969eb4463f1fb28c77ff0b33b444d9cef - md5: f5047e2bc6a82dcdf2f169fdb0bbed99 + url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh7428d3b_0.conda + sha256: 2826fae9530bf5ea53b3b825483d9bd1c01b5635aebc37e0f56003bab434ade6 + md5: d7f3d6377b3988475bd1fa6493b7b115 depends: - __win - colorama @@ -7237,8 +7280,8 @@ packages: license_family: BSD purls: - pkg:pypi/ipython?source=hash-mapping - size: 600345 - timestamp: 1719583103556 + size: 600176 + timestamp: 1725050732048 - kind: conda name: isoduration version: 20.11.0 @@ -7364,11 +7407,12 @@ packages: - kind: conda name: jsonpointer version: 3.0.0 - build: py312h2e8e312_0 + build: py312h2e8e312_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_0.conda - sha256: 74d440e8250ff2ca05013b959de954bc85d84ff14a3b60c9e3dc7e071cddfa42 - md5: 6509bc42d9d26be656db3332da504913 + url: 
https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_1.conda + sha256: 6865b97780e795337f65592582aee6f25e5b96214c64ffd3f8cdf580fd64ba22 + md5: e3ceda014d8461a11ca8552830a978f9 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -7376,16 +7420,17 @@ packages: license_family: BSD purls: - pkg:pypi/jsonpointer?source=hash-mapping - size: 42461 - timestamp: 1718283943216 + size: 42235 + timestamp: 1725303419414 - kind: conda name: jsonpointer version: 3.0.0 - build: py312h7900ff3_0 + build: py312h7900ff3_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_0.conda - sha256: b5d17c5db3c7306d3625745a27359f806a6dd94707d76d74cba541fc1daa2ae3 - md5: 320338762418ae59539ae368d4386085 + url: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda + sha256: 76ccb7bffc7761d1d3133ffbe1f7f1710a0f0d9aaa9f7ea522652e799f3601f4 + md5: 6b51f7459ea4073eeb5057207e2e1e3d depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -7393,16 +7438,17 @@ packages: license_family: BSD purls: - pkg:pypi/jsonpointer?source=hash-mapping - size: 17497 - timestamp: 1718283512438 + size: 17277 + timestamp: 1725303032027 - kind: conda name: jsonpointer version: 3.0.0 - build: py312hb401068_0 + build: py312hb401068_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_0.conda - sha256: c28d5ee8ddc58858c711f0a4874916ed7d1306fa8b12bb95e3e8bb7183f2e287 - md5: 7d360dce2fa56d1701773d26ecccb038 + url: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_1.conda + sha256: 52fcb1db44a935bba26988cc17247a0f71a8ad2fbc2b717274a8c8940856ee0d + md5: 5dcf96bca4649d496d818a0f5cfb962e depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -7410,8 +7456,8 @@ packages: license_family: BSD purls: - pkg:pypi/jsonpointer?source=hash-mapping - size: 17704 - timestamp: 
1718283533709 + size: 17560 + timestamp: 1725303027769 - kind: conda name: jsonschema version: 4.23.0 @@ -7657,13 +7703,13 @@ packages: timestamp: 1710262791393 - kind: conda name: jupyterlab - version: 4.2.4 + version: 4.2.5 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.4-pyhd8ed1ab_0.conda - sha256: e3b585b55634da48871ed3082c429652a62bf0cf7733641b1382b9c314f1c901 - md5: 28f3334e97c39de2b7ac15743b041784 + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda + sha256: db08036a6fd846c178ebdce7327be1130bda10ac96113c17b04bce2bc4d67dda + md5: 594762eddc55b82feac6097165a88e3c depends: - async-lru >=1.0.0 - httpx >=0.25.0 @@ -7686,8 +7732,8 @@ packages: license_family: BSD purls: - pkg:pypi/jupyterlab?source=hash-mapping - size: 8187486 - timestamp: 1721396667021 + size: 7361961 + timestamp: 1724745262468 - kind: conda name: jupyterlab_pygments version: 0.3.0 @@ -7739,12 +7785,12 @@ packages: - kind: conda name: kealib version: 1.5.3 - build: h6c43f9b_1 - build_number: 1 + build: h6c43f9b_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.3-h6c43f9b_1.conda - sha256: b4b2cee0ad62ae1f8e4a541d34074c575df935682c023fdf1c21c9c5c9995fa9 - md5: a20c9e3598a55ca3e61cad90ef33ada3 + url: https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.3-h6c43f9b_2.conda + sha256: 19c981a049651439cfd851bbf785144d0f10db1f605ce19001a8eb27da6def94 + md5: 873b3deabbefe46d00cc81ce7d9547a7 depends: - hdf5 >=1.14.3,<1.14.4.0a0 - ucrt >=10.0.20348.0 @@ -7753,44 +7799,45 @@ packages: license: MIT license_family: MIT purls: [] - size: 133355 - timestamp: 1716158947179 + size: 133242 + timestamp: 1725399840908 - kind: conda name: kealib version: 1.5.3 - build: hb2b617a_1 - build_number: 1 + build: he475af8_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/kealib-1.5.3-hb2b617a_1.conda - sha256: 
3150dedf047284e8b808a169dfe630d818d8513b79d08a5404b90973c61c6914 - md5: e24e1fa559fd29c34593d6a47b459443 + url: https://conda.anaconda.org/conda-forge/osx-64/kealib-1.5.3-he475af8_2.conda + sha256: 12badb5e2f8bd38bee33a3c3ec0108a37f106f286e2caad97d8c660936d59249 + md5: 1d0f27a93940d512f681fe3f4f7439f0 depends: - __osx >=10.13 - hdf5 >=1.14.3,<1.14.4.0a0 - - libcxx >=16 + - libcxx >=17 license: MIT license_family: MIT purls: [] - size: 152270 - timestamp: 1716158359765 + size: 150151 + timestamp: 1725399603970 - kind: conda name: kealib version: 1.5.3 - build: hee9dde6_1 - build_number: 1 + build: hf8d3e68_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hee9dde6_1.conda - sha256: d607ddb5906a335cb3665dd81f3adec4af248cf398147693b470b65d887408e7 - md5: c5b7b29e2b66107553d0366538257a51 + url: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hf8d3e68_2.conda + sha256: a45cb038fce2b6fa154cf0c71485a75b59cb1d8d6b0465bdcb23736aca6bf2ac + md5: ffe68c611ae0ccfda4e7a605195e22b3 depends: + - __glibc >=2.17,<3.0.a0 - hdf5 >=1.14.3,<1.14.4.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc >=13 + - libstdcxx >=13 license: MIT license_family: MIT purls: [] - size: 170709 - timestamp: 1716158265533 + size: 180005 + timestamp: 1725399272056 - kind: conda name: keyutils version: 1.6.1 @@ -7807,64 +7854,63 @@ packages: timestamp: 1646151697040 - kind: conda name: kiwisolver - version: 1.4.5 - build: py312h0d7def4_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.5-py312h0d7def4_1.conda - sha256: 07021ffc3bbf42922694c23634e028950547d088717b448b46296b3ca5a26068 - md5: 77c9d46fc8680bb08f4e1ebb6669e44e + version: 1.4.7 + build: py312h68727a3_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda + sha256: d752c53071ee5d712baa9742dd1629e60388c5ce4ab11d4e73a1690443e41769 + md5: 
444266743652a4f1538145e9362f6d3b depends: - - python >=3.12.0rc3,<3.13.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/kiwisolver?source=hash-mapping - size: 55576 - timestamp: 1695380565733 + size: 70922 + timestamp: 1725459412788 - kind: conda name: kiwisolver - version: 1.4.5 - build: py312h49ebfd2_1 - build_number: 1 + version: 1.4.7 + build: py312hc5c4d5f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/kiwisolver-1.4.5-py312h49ebfd2_1.conda - sha256: 11d9daa79051a7ae52881d11f48816366fd3d46018281431abe507da7b45f69c - md5: 21f174a5cfb5964069c374171a979157 + url: https://conda.anaconda.org/conda-forge/osx-64/kiwisolver-1.4.7-py312hc5c4d5f_0.conda + sha256: 87470d7eed470c01efa19dd0d5a2eca9149afa1176d1efc50c475b3b81df62c1 + md5: 7b72389a8a3ba350285f86933ab85da0 depends: - - libcxx >=15.0.7 - - python >=3.12.0rc3,<3.13.0a0 + - __osx >=10.13 + - libcxx >=17 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/kiwisolver?source=hash-mapping - size: 60227 - timestamp: 1695380392812 + size: 62176 + timestamp: 1725459509941 - kind: conda name: kiwisolver - version: 1.4.5 - build: py312h8572e83_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py312h8572e83_1.conda - sha256: 2ffd3f6726392591c6794ab130f6701f5ffba0ec8658ef40db5a95ec8d583143 - md5: c1e71f2bc05d8e8e033aefac2c490d05 + version: 1.4.7 + build: py312hd5eb7cc_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.7-py312hd5eb7cc_0.conda + sha256: b5b3ed78e4c44483afb68f53427db3d232ddf7930ca180bb00fa86ceca7cf7e4 + md5: 1eddb74a9fbb1d4d6fde9aef272ad1d0 depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - python >=3.12.0rc3,<3.13.0a0 + - 
python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/kiwisolver?source=hash-mapping - size: 72099 - timestamp: 1695380122482 + size: 55405 + timestamp: 1725459633511 - kind: conda name: krb5 version: 1.21.3 @@ -8222,27 +8268,22 @@ packages: - kind: conda name: libarrow version: 17.0.0 - build: h2952479_8_cpu - build_number: 8 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h2952479_8_cpu.conda - sha256: 176e9d4269392c1056d2873bc7342a3707ae76e69a0c28244e1eb849d178d8f6 - md5: c72368f864d0c02dd690b95c37330cc3 + build: h29daf90_13_cpu + build_number: 13 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h29daf90_13_cpu.conda + sha256: 1a0f66e822f4cde398b15fe7ac94cb4197635798da9feebcb88c900637e05f77 + md5: d0ea8c4474c45aae86eff71a0f293013 depends: - - __osx >=10.13 - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.8.0,<1.8.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-files-datalake-cpp >=12.11.0,<12.11.1.0a0 - bzip2 >=1.0.8,<2.0a0 - - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - - libcxx >=17 + - libcrc32c >=1.1.2,<1.2.0a0 + - libcurl >=8.9.1,<9.0a0 - libgoogle-cloud >=2.28.0,<2.29.0a0 - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 - libre2-11 >=2023.9.1,<2024.0a0 @@ -8252,47 +8293,54 @@ packages: - orc >=2.0.2,<2.0.3.0a0 - re2 - snappy >=1.2.1,<1.3.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.6.0a0 constrains: - - parquet-cpp <0.0a0 - arrow-cpp <0.0a0 + - parquet-cpp <0.0a0 - apache-arrow-proc =*=cpu license: Apache-2.0 license_family: APACHE purls: [] - size: 
5906936 - timestamp: 1723787555959 + size: 5128979 + timestamp: 1725215183038 - kind: conda name: libarrow version: 17.0.0 - build: h6e8cf4f_8_cpu - build_number: 8 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h6e8cf4f_8_cpu.conda - sha256: 678017b1f42a0d7413e967e4cd8c6ddde4fc5fd54ec539fe4ab8b28c469c255b - md5: 62ff39484dee01fdcde69bd40de4cd9b + build: h8d2e343_13_cpu + build_number: 13 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda + sha256: 91e639761f29ee1ca144e92110d47c8e68038f26201eef25585a48826e037fb2 + md5: dc379f362829d5df5ce6722565110029 depends: - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - __glibc >=2.17,<3.0.a0 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 + - azure-core-cpp >=1.13.0,<1.13.1.0a0 + - azure-identity-cpp >=1.8.0,<1.8.1.0a0 + - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 + - azure-storage-files-datalake-cpp >=12.11.0,<12.11.1.0a0 - bzip2 >=1.0.8,<2.0a0 + - gflags >=2.2.2,<2.3.0a0 + - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl >=8.9.1,<9.0a0 + - libgcc >=13 - libgoogle-cloud >=2.28.0,<2.29.0a0 - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 - libre2-11 >=2023.9.1,<2024.0a0 + - libstdcxx >=13 - libutf8proc >=2.8.0,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - orc >=2.0.2,<2.0.3.0a0 - re2 - snappy >=1.2.1,<1.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.6.0a0 constrains: - arrow-cpp <0.0a0 @@ -8301,37 +8349,35 @@ packages: license: Apache-2.0 license_family: APACHE purls: [] - size: 5155629 - timestamp: 1723787774579 + size: 8512685 + timestamp: 1725214716301 - kind: conda name: libarrow version: 17.0.0 - build: h8756180_8_cpu - build_number: 8 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8756180_8_cpu.conda - sha256: 8ec8b6b036e96e3b3336b2a0ba6031c12366a44a102db1dc1a2f390d3114a70f - md5: 7fac330a6725172a912cc484a0f93825 + build: ha60c65e_13_cpu + build_number: 13 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-ha60c65e_13_cpu.conda + sha256: d8096066ce779a82cbb2045030f8095ed5689cac2ac1ee0c58251e7f448f1a87 + md5: 4cdf43459510697d824c377428a120b1 depends: - - __glibc >=2.17,<3.0.a0 - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - __osx >=10.13 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - azure-identity-cpp >=1.8.0,<1.8.1.0a0 - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - azure-storage-files-datalake-cpp >=12.11.0,<12.11.1.0a0 - bzip2 >=1.0.8,<2.0a0 - - gflags >=2.2.2,<2.3.0a0 - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - - libgcc-ng >=12 + - libcxx >=17 - libgoogle-cloud >=2.28.0,<2.29.0a0 - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 - libre2-11 >=2023.9.1,<2024.0a0 - - libstdcxx-ng >=12 - libutf8proc >=2.8.0,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 @@ -8340,147 +8386,147 @@ packages: - snappy >=1.2.1,<1.3.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - - parquet-cpp <0.0a0 - arrow-cpp <0.0a0 + - parquet-cpp <0.0a0 - apache-arrow-proc =*=cpu license: Apache-2.0 license_family: APACHE purls: [] - size: 8423523 - timestamp: 1723787570616 + size: 5899274 + timestamp: 1725214352592 - kind: conda name: libarrow-acero version: 17.0.0 - build: hac325c4_8_cpu - build_number: 8 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_8_cpu.conda - sha256: 03b050f81e80357acc85a4bad183a4b4405c6f483e6226b59df7046219063d97 - md5: 0959238adfeefd5d4faa56f969c086a1 + build: h5888daf_13_cpu + build_number: 13 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda + sha256: cda9e38ad7af7ba72416031b089de5048f8526ae586149ff9f6506366689d699 + md5: b654d072b8d5da807495e49b28a0b884 depends: - - __osx >=10.13 - - libarrow 17.0.0 h2952479_8_cpu - - libcxx >=17 + - __glibc >=2.17,<3.0.a0 + - libarrow 17.0.0 h8d2e343_13_cpu + - libgcc >=13 + - libstdcxx >=13 license: Apache-2.0 license_family: APACHE purls: [] - size: 514783 - timestamp: 1723787684450 + size: 609649 + timestamp: 1725214754397 - kind: conda name: libarrow-acero version: 17.0.0 - build: he02047a_8_cpu - build_number: 8 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-he02047a_8_cpu.conda - sha256: b4d5546c8c7a1dc4476fa88c97f0bca9720f6ece1f60873625989d900f1eb85b - md5: 1151aa2dcc30d03c775b8233334f7d24 + build: hac325c4_13_cpu + build_number: 13 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_13_cpu.conda + sha256: c6195a789edb257746ca9f8648419c9efdb67e0ef62d2ba818eaa921f94e90af + md5: 218079f1d0ba0a46246db86a9e96c417 depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h8756180_8_cpu - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - __osx >=10.13 + - libarrow 17.0.0 ha60c65e_13_cpu + - libcxx >=17 license: Apache-2.0 license_family: APACHE purls: [] - size: 598342 - timestamp: 1723787612766 + size: 515115 + timestamp: 1725214443841 - kind: conda name: libarrow-acero version: 17.0.0 - build: he0c23c2_8_cpu - build_number: 8 + build: he0c23c2_13_cpu + build_number: 13 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_8_cpu.conda - sha256: b41eac77c0ae102072d02d115a02d20220bc7cc71cdecf908be39c3937750906 - md5: bdb442cf59fa7003345ebe441c73f7c3 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_13_cpu.conda + sha256: 850b28abba3e40302cb5425ffb96f085d2089decafb2e80d85b4f8b44c2c777d + md5: 
1a38e993ef119557596ae20cd68a1207 depends: - - libarrow 17.0.0 h6e8cf4f_8_cpu + - libarrow 17.0.0 h29daf90_13_cpu - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: [] - size: 444981 - timestamp: 1723787846964 + size: 445286 + timestamp: 1725215254997 - kind: conda name: libarrow-dataset version: 17.0.0 - build: hac325c4_8_cpu - build_number: 8 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_8_cpu.conda - sha256: d88f29865bdabb1c7c393376187e203695d24f5a9bc21f49f211f43e3de72cfc - md5: 3065f53e69e73473965722303421b207 + build: h5888daf_13_cpu + build_number: 13 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda + sha256: b3fac9bc9a399670d6993738d018324d6e1b0a85755b484204405bb72efabc4e + md5: cd2c36e8865b158b82f61c6aac28b7e1 depends: - - __osx >=10.13 - - libarrow 17.0.0 h2952479_8_cpu - - libarrow-acero 17.0.0 hac325c4_8_cpu - - libcxx >=17 - - libparquet 17.0.0 hf1b0f52_8_cpu + - __glibc >=2.17,<3.0.a0 + - libarrow 17.0.0 h8d2e343_13_cpu + - libarrow-acero 17.0.0 h5888daf_13_cpu + - libgcc >=13 + - libparquet 17.0.0 h39682fd_13_cpu + - libstdcxx >=13 license: Apache-2.0 license_family: APACHE purls: [] - size: 507423 - timestamp: 1723788598288 + size: 582848 + timestamp: 1725214820464 - kind: conda name: libarrow-dataset version: 17.0.0 - build: he02047a_8_cpu - build_number: 8 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-he02047a_8_cpu.conda - sha256: 46b95328d961bbf21bd337a9015384218a37e31e3f4c5fe4320682af15a3ea47 - md5: 7e06a68fda280d149d9a43bae84dd374 + build: hac325c4_13_cpu + build_number: 13 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_13_cpu.conda + sha256: de66e86133af737ecafd67a043f2756afb78fe77503bcf8e1dc2b73a706f55b5 + md5: d7609f5867208b278655602ac636363b 
depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h8756180_8_cpu - - libarrow-acero 17.0.0 he02047a_8_cpu - - libgcc-ng >=12 - - libparquet 17.0.0 haa1307c_8_cpu - - libstdcxx-ng >=12 + - __osx >=10.13 + - libarrow 17.0.0 ha60c65e_13_cpu + - libarrow-acero 17.0.0 hac325c4_13_cpu + - libcxx >=17 + - libparquet 17.0.0 hf1b0f52_13_cpu license: Apache-2.0 license_family: APACHE purls: [] - size: 580241 - timestamp: 1723787687906 + size: 506575 + timestamp: 1725215307580 - kind: conda name: libarrow-dataset version: 17.0.0 - build: he0c23c2_8_cpu - build_number: 8 + build: he0c23c2_13_cpu + build_number: 13 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_8_cpu.conda - sha256: 7c71e7780aa9929c54fe346624f1771e5ac1a8319c039bbcc8b0348dac8982ec - md5: 75aaac52a012ee897bfb6f78fa0593bc + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_13_cpu.conda + sha256: 12b0395dc22a2c3fb03e8b8ab32bcf4ff08947b8611b2a1e9c49644d8391893c + md5: dd78096e1335abc3c7bf6915d0ac7c34 depends: - - libarrow 17.0.0 h6e8cf4f_8_cpu - - libarrow-acero 17.0.0 he0c23c2_8_cpu - - libparquet 17.0.0 ha915800_8_cpu + - libarrow 17.0.0 h29daf90_13_cpu + - libarrow-acero 17.0.0 he0c23c2_13_cpu + - libparquet 17.0.0 ha915800_13_cpu - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: [] - size: 427138 - timestamp: 1723788063640 + size: 427535 + timestamp: 1725215469376 - kind: conda name: libarrow-substrait version: 17.0.0 - build: h1f0e801_8_cpu - build_number: 8 + build: h1f0e801_13_cpu + build_number: 13 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_8_cpu.conda - sha256: b865fba2df586b491a3ef53e412703dc755cdc4769d620b1f092ce88fd798c09 - md5: a3a7034bff5cce176f84f23e9dcf84be + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_13_cpu.conda + sha256: 
637c2652cfe676d6949f7953de7d51e90bc35863c3a114c29795b5b0e119699c + md5: b618c36e7eff7a28a53bde4d9aa017e0 depends: - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - - libarrow 17.0.0 h6e8cf4f_8_cpu - - libarrow-acero 17.0.0 he0c23c2_8_cpu - - libarrow-dataset 17.0.0 he0c23c2_8_cpu + - libarrow 17.0.0 h29daf90_13_cpu + - libarrow-acero 17.0.0 he0c23c2_13_cpu + - libarrow-dataset 17.0.0 he0c23c2_13_cpu - libprotobuf >=4.25.3,<4.25.4.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -8488,55 +8534,55 @@ packages: license: Apache-2.0 license_family: APACHE purls: [] - size: 382514 - timestamp: 1723788163377 + size: 382757 + timestamp: 1725215569161 - kind: conda name: libarrow-substrait version: 17.0.0 - build: hba007a9_8_cpu - build_number: 8 + build: hba007a9_13_cpu + build_number: 13 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_8_cpu.conda - sha256: 8fbe57c403f94e7f4fd30e1950f69420ca2ef721464e78e10a8ea8147fc52043 - md5: b9e043a6830b11a5e5e6656805de1dbb + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_13_cpu.conda + sha256: 729523ec54db45127b1e644454d3612ce48196c27426ae5c2ace022b6791bf53 + md5: 883ffa72318b7952df9a21243ab2f281 depends: - __osx >=10.13 - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - - libarrow 17.0.0 h2952479_8_cpu - - libarrow-acero 17.0.0 hac325c4_8_cpu - - libarrow-dataset 17.0.0 hac325c4_8_cpu + - libarrow 17.0.0 ha60c65e_13_cpu + - libarrow-acero 17.0.0 hac325c4_13_cpu + - libarrow-dataset 17.0.0 hac325c4_13_cpu - libcxx >=17 - libprotobuf >=4.25.3,<4.25.4.0a0 license: Apache-2.0 license_family: APACHE purls: [] - size: 478529 - timestamp: 1723788757471 + size: 478730 + timestamp: 1725215444041 - kind: conda name: libarrow-substrait version: 17.0.0 - build: hc9a23c6_8_cpu - build_number: 8 + build: hf54134d_13_cpu + build_number: 13 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hc9a23c6_8_cpu.conda - sha256: c1801c153039cee5d24d2f333fab2b5c402883934930cf3946f9b67d0c9070ea - md5: 613b7846b912a62c4f3e50afc1635707 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda + sha256: 01ff52d5b866f3174018c81dee808fbef1101f2cff05cc5f29c80ff68cc8796c + md5: 46f41533959eee8826c09e55976b8c06 depends: - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - - libarrow 17.0.0 h8756180_8_cpu - - libarrow-acero 17.0.0 he02047a_8_cpu - - libarrow-dataset 17.0.0 he02047a_8_cpu - - libgcc-ng >=12 + - libarrow 17.0.0 h8d2e343_13_cpu + - libarrow-acero 17.0.0 h5888daf_13_cpu + - libarrow-dataset 17.0.0 h5888daf_13_cpu + - libgcc >=13 - libprotobuf >=4.25.3,<4.25.4.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 license: Apache-2.0 license_family: APACHE purls: [] - size: 546426 - timestamp: 1723787723163 + size: 550883 + timestamp: 1725214851656 - kind: conda name: libblas version: 3.9.0 @@ -8605,26 +8651,28 @@ packages: - kind: conda name: libbrotlicommon version: 1.1.0 - build: h0dc2134_1 - build_number: 1 + build: h00291cd_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h0dc2134_1.conda - sha256: f57c57c442ef371982619f82af8735f93a4f50293022cfd1ffaf2ff89c2e0b2a - md5: 9e6c31441c9aa24e41ace40d6151aab6 + url: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h00291cd_2.conda + sha256: b377056470a9fb4a100aa3c51b3581aab6496ba84d21cd99bcc1d5ef0359b1b6 + md5: 58f2c4bdd56c46cc7451596e4ae68e0b + depends: + - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 67476 - timestamp: 1695990207321 + size: 67267 + timestamp: 1725267768667 - kind: conda name: libbrotlicommon version: 1.1.0 - build: hcfcfb64_1 - build_number: 1 + build: h2466b09_2 + build_number: 2 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-hcfcfb64_1.conda - sha256: f75fed29b0cc503d1b149a4945eaa32df56e19da5e2933de29e8f03947203709 - md5: f77f319fb82980166569e1280d5b2864 + url: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda + sha256: 33e8851c6cc8e2d93059792cd65445bfe6be47e4782f826f01593898ec95764c + md5: f7dc9a8f21d74eab46456df301da2972 depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -8632,128 +8680,133 @@ packages: license: MIT license_family: MIT purls: [] - size: 70598 - timestamp: 1695990405143 + size: 70526 + timestamp: 1725268159739 - kind: conda name: libbrotlicommon version: 1.1.0 - build: hd590300_1 - build_number: 1 + build: hb9d3cd8_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda - sha256: 40f29d1fab92c847b083739af86ad2f36d8154008cf99b64194e4705a1725d78 - md5: aec6c91c7371c26392a06708a73c70e5 + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda + sha256: d9db2de60ea917298e658143354a530e9ca5f9c63471c65cf47ab39fd2f429e3 + md5: 41b599ed2b02abcfdd84302bff174b23 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 69403 - timestamp: 1695990007212 + size: 68851 + timestamp: 1725267660471 - kind: conda name: libbrotlidec version: 1.1.0 - build: h0dc2134_1 - build_number: 1 + build: h00291cd_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h0dc2134_1.conda - sha256: b11939c4c93c29448660ab5f63273216969d1f2f315dd9be60f3c43c4e61a50c - md5: 9ee0bab91b2ca579e10353738be36063 + url: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h00291cd_2.conda + sha256: 4d49ea72e2f44d2d7a8be5472e4bd0bc2c6b89c55569de2c43576363a0685c0c + md5: 34709a1f5df44e054c4a12ab536c5459 depends: - - libbrotlicommon 1.1.0 h0dc2134_1 + - __osx >=10.13 + - 
libbrotlicommon 1.1.0 h00291cd_2 license: MIT license_family: MIT purls: [] - size: 30327 - timestamp: 1695990232422 + size: 29872 + timestamp: 1725267807289 - kind: conda name: libbrotlidec version: 1.1.0 - build: hcfcfb64_1 - build_number: 1 + build: h2466b09_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-hcfcfb64_1.conda - sha256: 1b352ee05931ea24c11cd4a994d673890fd1cc690c21e023e736bdaac2632e93 - md5: 19ce3e1dacc7912b3d6ff40690ba9ae0 + url: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda + sha256: 234fc92f4c4f1cf22f6464b2b15bfc872fa583c74bf3ab9539ff38892c43612f + md5: 9bae75ce723fa34e98e239d21d752a7e depends: - - libbrotlicommon 1.1.0 hcfcfb64_1 + - libbrotlicommon 1.1.0 h2466b09_2 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 32788 - timestamp: 1695990443165 + size: 32685 + timestamp: 1725268208844 - kind: conda name: libbrotlidec version: 1.1.0 - build: hd590300_1 - build_number: 1 + build: hb9d3cd8_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda - sha256: 86fc861246fbe5ad85c1b6b3882aaffc89590a48b42d794d3d5c8e6d99e5f926 - md5: f07002e225d7a60a694d42a7bf5ff53f + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda + sha256: 2892d512cad096cb03f1b66361deeab58b64e15ba525d6592bb6d609e7045edf + md5: 9566f0bd264fbd463002e759b8a82401 depends: - - libbrotlicommon 1.1.0 hd590300_1 - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libbrotlicommon 1.1.0 hb9d3cd8_2 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 32775 - timestamp: 1695990022788 + size: 32696 + timestamp: 1725267669305 - kind: conda name: libbrotlienc version: 1.1.0 - build: h0dc2134_1 - build_number: 1 + build: h00291cd_2 + build_number: 2 subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h0dc2134_1.conda - sha256: bc964c23e1a60ca1afe7bac38a9c1f2af3db4a8072c9f2eac4e4de537a844ac7 - md5: 8a421fe09c6187f0eb5e2338a8a8be6d + url: https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h00291cd_2.conda + sha256: 477d236d389473413a1ccd2bec1b66b2f1d2d7d1b4a57bb56421b7b611a56cd1 + md5: 691f0dcb36f1ae67f5c489f20ae987ea depends: - - libbrotlicommon 1.1.0 h0dc2134_1 + - __osx >=10.13 + - libbrotlicommon 1.1.0 h00291cd_2 license: MIT license_family: MIT purls: [] - size: 299092 - timestamp: 1695990259225 + size: 296353 + timestamp: 1725267822076 - kind: conda name: libbrotlienc version: 1.1.0 - build: hcfcfb64_1 - build_number: 1 + build: h2466b09_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-hcfcfb64_1.conda - sha256: eae6b76154e594c6d211160c6d1aeed848672618152a562e0eabdfa641d34aca - md5: 71e890a0b361fd58743a13f77e1506b7 + url: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda + sha256: 3d0dd7ef505962f107b7ea8f894e0b3dd01bf46852b362c8a7fc136b039bc9e1 + md5: 85741a24d97954a991e55e34bc55990b depends: - - libbrotlicommon 1.1.0 hcfcfb64_1 + - libbrotlicommon 1.1.0 h2466b09_2 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 246515 - timestamp: 1695990479484 + size: 245929 + timestamp: 1725268238259 - kind: conda name: libbrotlienc version: 1.1.0 - build: hd590300_1 - build_number: 1 + build: hb9d3cd8_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda - sha256: f751b8b1c4754a2a8dfdc3b4040fa7818f35bbf6b10e905a47d3a194b746b071 - md5: 5fc11c6020d421960607d821310fcd4d + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda + sha256: 779f58174e99de3600e939fa46eddb453ec5d3c60bb46cdaa8b4c127224dbf29 + md5: 
06f70867945ea6a84d35836af780f1de depends: - - libbrotlicommon 1.1.0 hd590300_1 - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libbrotlicommon 1.1.0 hb9d3cd8_2 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 282523 - timestamp: 1695990038302 + size: 281750 + timestamp: 1725267679782 - kind: conda name: libcblas version: 3.9.0 @@ -8817,50 +8870,50 @@ packages: - kind: conda name: libclang-cpp18.1 version: 18.1.8 - build: default_hf981a13_2 - build_number: 2 + build: default_hf981a13_4 + build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_2.conda - sha256: cdcce55ed6f7b788401af9a21bb31f3529eb14fe72455f9e8d628cd513a14527 - md5: b0f8c590aa86d9bee5987082f7f15bdf + url: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_4.conda + sha256: ec7ed3003f4b1507043f7a4ad85339c7a20898ff213e8f77f51f69c30d76780a + md5: 7b72d74b57e681251536094b96ba9c46 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=12 - libllvm18 >=18.1.8,<18.2.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=12 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 19198047 - timestamp: 1723281150801 + size: 19176386 + timestamp: 1725430019231 - kind: conda name: libclang13 version: 18.1.8 - build: default_h9def88c_2 - build_number: 2 + build: default_h9def88c_4 + build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_2.conda - sha256: 0259c1e50d036a1f7c6aac04d1010e01451f0e6370835b644d516cb73e8a164b - md5: ba2d12adbea9de311297f2b577f4bb86 + url: https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_4.conda + sha256: 606c82d902a6d343b1b21967d30d73f6d54b8340fe180f2b0641fb775fba91e9 + md5: 7e3f831d4ae9820999418821be65ff67 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=12 - libllvm18 >=18.1.8,<18.2.0a0 - - libstdcxx-ng >=12 + - 
libstdcxx >=12 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 11017309 - timestamp: 1723281396578 + size: 11017079 + timestamp: 1725430212320 - kind: conda name: libclang13 version: 18.1.8 - build: default_ha5278ca_2 - build_number: 2 + build: default_ha5278ca_4 + build_number: 4 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_2.conda - sha256: 9dc702447517aa7db3222b20cfab3520f382c8a36f875d7ffc0f19fa88bf5122 - md5: 8185207d3f7e59474870cc79e4f9eaa5 + url: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_4.conda + sha256: be74316898c456b0a19fcbbe73f94f6a9459d444317e932a0636882603edae3e + md5: e9d701da6db17a9638be8dc5569b0327 depends: - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 @@ -8870,8 +8923,8 @@ packages: license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 25325593 - timestamp: 1723282347668 + size: 25317731 + timestamp: 1725434281988 - kind: conda name: libcrc32c version: 1.1.2 @@ -8940,12 +8993,12 @@ packages: timestamp: 1689195353551 - kind: conda name: libcurl - version: 8.9.1 - build: h18fefc2_0 + version: 8.10.0 + build: h1ee3ff0_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.9.1-h18fefc2_0.conda - sha256: 024be133aed5f100c0b222761e747cc27a2bdf94af51947ad5f70e88cf824988 - md5: 099a1016d23baa4f41148a985351a7a8 + url: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.0-h1ee3ff0_0.conda + sha256: 2209a888bb1cdd82a359927efb6a949b8359d8e67edea2fcc29a32ce17214871 + md5: e5b4f3d5768b72716f05513d6fa02ba9 depends: - krb5 >=1.21.3,<1.22.0a0 - libssh2 >=1.11.0,<2.0a0 @@ -8956,65 +9009,67 @@ packages: license: curl license_family: MIT purls: [] - size: 339298 - timestamp: 1722440239161 + size: 342210 + timestamp: 1726064608464 - kind: conda name: libcurl - version: 8.9.1 - build: hdb1bdb2_0 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda - sha256: 0ba60f83709068e9ec1ab543af998cb5a201c8379c871205447684a34b5abfd8 - md5: 7da1d242ca3591e174a3c7d82230d3c0 + version: 8.10.0 + build: h58e7537_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.0-h58e7537_0.conda + sha256: 6b28ed898562ee9e351bbb209fea25c9cd4078f2010223f23dbccc3be0c3d361 + md5: 732abd8f88ee1749239335c2328e5fc3 depends: + - __osx >=10.13 - krb5 >=1.21.3,<1.22.0a0 - - libgcc-ng >=12 - libnghttp2 >=1.58.0,<2.0a0 - libssh2 >=1.11.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - zstd >=1.5.6,<1.6.0a0 license: curl license_family: MIT purls: [] - size: 416057 - timestamp: 1722439924963 + size: 402216 + timestamp: 1726064094965 - kind: conda name: libcurl - version: 8.9.1 - build: hfcf2730_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.9.1-hfcf2730_0.conda - sha256: a7ce066fbb2d34f7948d8e5da30d72ff01f0a5bcde05ea46fa2d647eeedad3a7 - md5: 6ea09f173c46d135ee6d6845fe50a9c0 + version: 8.10.0 + build: hbbe4b11_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.0-hbbe4b11_0.conda + sha256: 7d9e0b7d855b9f0a3083190fb9931d6afb9c669009011bcb35cc3688d992a51a + md5: 657ea309ad90675ef144e7d27a271ab9 depends: + - __glibc >=2.17,<3.0.a0 - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=13 - libnghttp2 >=1.58.0,<2.0a0 - libssh2 >=1.11.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - zstd >=1.5.6,<1.6.0a0 license: curl license_family: MIT purls: [] - size: 397060 - timestamp: 1722440158491 + size: 425003 + timestamp: 1726063912465 - kind: conda name: libcxx version: 18.1.8 - build: heced48a_4 - build_number: 4 + build: hd876a4e_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-heced48a_4.conda - sha256: 
e6ad2e71bc9f2ee8fdcce7596baf5041941f69be5ffef478aaffd673f0691daf - md5: 7e13da1296840905452340fca10a625b + url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_7.conda + sha256: ca43fcc18bff98cbf456ccc76fe113b2afe01d4156c2899b638fd1bc0323d239 + md5: c346ae5c96382a12563e3b0c403c8c4a depends: - __osx >=10.13 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 1268903 - timestamp: 1723637719063 + size: 439306 + timestamp: 1725403678987 - kind: conda name: libdeflate version: '1.21' @@ -9065,20 +9120,21 @@ packages: timestamp: 1722820232914 - kind: conda name: libdrm - version: 2.4.122 - build: h4ab18f5_0 + version: 2.4.123 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.122-h4ab18f5_0.conda - sha256: 74c59a29b76bafbb022389c7cfa9b33b8becd7879b2c6b25a1a99735bf4e9c81 - md5: bbfc4dbe5e97b385ef088f354d65e563 + url: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda + sha256: 5f274243fc7480b721a4ed6623c72d07b86a508a1363a85f0f16451ab655ace8 + md5: ee605e794bdc14e2b7f84c4faa0d8c2c depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=13 - libpciaccess >=0.18,<0.19.0a0 license: MIT license_family: MIT purls: [] - size: 305483 - timestamp: 1719531428392 + size: 303108 + timestamp: 1724719521496 - kind: conda name: libedit version: 3.1.20191231 @@ -9112,6 +9168,21 @@ packages: purls: [] size: 123878 timestamp: 1597616541093 +- kind: conda + name: libegl + version: 1.7.0 + build: ha4b6fd6_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_0.conda + sha256: d577ab061760e631c2980eb88d6970e43391c461a89fc7cd6f98e2999d626d44 + md5: 35e52d19547cb3265a09c49de146a5ae + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_0 + license: LicenseRef-libglvnd + purls: [] + size: 44492 + timestamp: 1723473193819 - kind: conda name: libev version: '4.33' @@ -9196,51 +9267,58 @@ packages: 
timestamp: 1685725977222 - kind: conda name: libexpat - version: 2.6.2 - build: h59595ed_0 + version: 2.6.3 + build: h5888daf_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda - sha256: 331bb7c7c05025343ebd79f86ae612b9e1e74d2687b8f3179faec234f986ce19 - md5: e7ba12deb7020dd080c6c70e7b6f6a3d + url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda + sha256: 4bb47bb2cd09898737a5211e2992d63c555d63715a07ba56eae0aff31fb89c22 + md5: 59f4c43bb1b5ef1c71946ff2cbf59524 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 constrains: - - expat 2.6.2.* + - expat 2.6.3.* license: MIT license_family: MIT purls: [] - size: 73730 - timestamp: 1710362120304 + size: 73616 + timestamp: 1725568742634 - kind: conda name: libexpat - version: 2.6.2 - build: h63175ca_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.2-h63175ca_0.conda - sha256: 79f612f75108f3e16bbdc127d4885bb74729cf66a8702fca0373dad89d40c4b7 - md5: bc592d03f62779511d392c175dcece64 + version: 2.6.3 + build: hac325c4_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.3-hac325c4_0.conda + sha256: dd22dffad6731c352f4c14603868c9cce4d3b50ff5ff1e50f416a82dcb491947 + md5: c1db99b0a94a2f23bd6ce39e2d314e07 + depends: + - __osx >=10.13 constrains: - - expat 2.6.2.* + - expat 2.6.3.* license: MIT license_family: MIT purls: [] - size: 139224 - timestamp: 1710362609641 + size: 70517 + timestamp: 1725568864316 - kind: conda name: libexpat - version: 2.6.2 - build: h73e2aa4_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda - sha256: a188a77b275d61159a32ab547f7d17892226e7dac4518d2c6ac3ac8fc8dfde92 - md5: 3d1d51c8f716d97c864d12f7af329526 + version: 2.6.3 + build: he0c23c2_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.3-he0c23c2_0.conda + sha256: 
9543965d155b8da96fc67dd81705fe5c2571c7c00becc8de5534c850393d4e3c + md5: 21415fbf4d0de6767a621160b43e5dea + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - expat 2.6.2.* + - expat 2.6.3.* license: MIT license_family: MIT purls: [] - size: 69246 - timestamp: 1710362566073 + size: 138992 + timestamp: 1725569106114 - kind: conda name: libffi version: 3.4.2 @@ -9289,31 +9367,50 @@ packages: size: 42063 timestamp: 1636489106777 - kind: conda - name: libgcc-ng + name: libgcc version: 14.1.0 - build: h77fa898_0 + build: h77fa898_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - sha256: b8e869ac96591cda2704bf7e77a301025e405227791a0bddf14a3dac65125538 - md5: ca0fad6a41ddaef54a153b78eccb5037 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3 + md5: 002ef4463dd1e2b44a94a4ace468f5d2 depends: - _libgcc_mutex 0.1 conda_forge - _openmp_mutex >=4.5 constrains: - - libgomp 14.1.0 h77fa898_0 + - libgomp 14.1.0 h77fa898_1 + - libgcc-ng ==14.1.0=*_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 846380 + timestamp: 1724801836552 +- kind: conda + name: libgcc-ng + version: 14.1.0 + build: h69a702a_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7 + md5: 1efc0ad219877a73ef977af7dbb51f17 + depends: + - libgcc 14.1.0 h77fa898_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 842109 - timestamp: 1719538896937 + size: 52170 + timestamp: 1724801842101 - kind: conda name: libgdal version: 3.9.2 - build: h57928b3_0 + build: h57928b3_2 + build_number: 2 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_0.conda - sha256: 6abe6109ac5b6f224d3d68c19193bb253d3e57f1b811fd5cc68c646a51521c12 - md5: 783316600dfb816aa970af85ca22d68c + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_2.conda + sha256: d4f9528a8e256b84e3d0a901a9b77d8a0fa654adda07e270e7fae0164652305e + md5: a38e3c87e1ce87145464716aec93fefc depends: - libgdal-core 3.9.2.* - libgdal-fits 3.9.2.* @@ -9329,18 +9426,18 @@ packages: - libgdal-tiledb 3.9.2.* - libgdal-xls 3.9.2.* license: MIT - license_family: MIT purls: [] - size: 423164 - timestamp: 1723843893315 + size: 423328 + timestamp: 1726098743251 - kind: conda name: libgdal version: 3.9.2 - build: h694c41f_0 + build: h694c41f_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_0.conda - sha256: 60f4201e7b76658c850eb4a677ee7e21f32ba07477eaef6fe319ff066e4e036e - md5: ffa824f80eefd79b4d0791c5fde6cab3 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_2.conda + sha256: 13a3b2dcf7ce090fe778a736cc7bc1034b0609ed6e19b91291b1958767978d64 + md5: abb256d462df471d514b7535eeb211a0 depends: - libgdal-core 3.9.2.* - libgdal-fits 3.9.2.* @@ -9356,18 +9453,18 @@ packages: - libgdal-tiledb 3.9.2.* - libgdal-xls 3.9.2.* license: MIT - license_family: MIT purls: [] - size: 422331 - timestamp: 1723842785993 + size: 422986 + timestamp: 1726095491845 - kind: conda name: libgdal version: 3.9.2 - build: ha770c72_0 + build: ha770c72_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_0.conda - sha256: b528270713d4896bf691feb8f6296bb788d80040ac5da425aba566c97c52d020 - md5: b79bd8b6b31b361a5f14c69806bf11ae + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_2.conda + sha256: 951075e8d6b5f57eee5f9ff8ea02f2e416b32fde2de85a455639960bec9fcc94 + md5: 5ce32492677df51aca4b755bb4f05835 depends: - libgdal-core 3.9.2.* - 
libgdal-fits 3.9.2.* @@ -9383,18 +9480,18 @@ packages: - libgdal-tiledb 3.9.2.* - libgdal-xls 3.9.2.* license: MIT - license_family: MIT purls: [] - size: 422239 - timestamp: 1723840384755 + size: 422566 + timestamp: 1726093634489 - kind: conda name: libgdal-core version: 3.9.2 - build: h4b9bb65_0 + build: h26ecb72_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h4b9bb65_0.conda - sha256: 49cda99e669aab7bf61a5181ed158580c122c55e3cad0cd4ec55800923097741 - md5: 888e92e405c46a459ef17a77db609f3b + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h26ecb72_2.conda + sha256: d7d0d7f15409286a3ab10bd8740189c659899433b33fdbcad4d62d317dbee908 + md5: 019cc81c6b62de83b4b3e7cd3487fd5d depends: - __osx >=10.13 - blosc >=1.21.6,<2.0a0 @@ -9404,22 +9501,22 @@ packages: - json-c >=0.17,<0.18.0a0 - lerc >=4.0.0,<5.0a0 - libarchive >=3.7.4,<3.8.0a0 - - libcurl >=8.9.1,<9.0a0 - - libcxx >=16 + - libcurl >=8.10.0,<9.0a0 + - libcxx >=17 - libdeflate >=1.21,<1.22.0a0 - - libexpat >=2.6.2,<3.0a0 + - libexpat >=2.6.3,<3.0a0 - libiconv >=1.17,<2.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libkml >=1.3.0,<1.4.0a0 - libpng >=1.6.43,<1.7.0a0 - libspatialite >=5.1.0,<5.2.0a0 - - libsqlite >=3.46.0,<4.0a0 + - libsqlite >=3.46.1,<4.0a0 - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.4.0,<2.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - proj >=9.4.1,<9.5.0a0 - xerces-c >=3.2.5,<3.3.0a0 @@ -9428,39 +9525,39 @@ packages: constrains: - libgdal 3.9.2.* license: MIT - license_family: MIT purls: [] - size: 8926233 - timestamp: 1723839455978 + size: 8978330 + timestamp: 1726092675670 - kind: conda name: libgdal-core version: 3.9.2 - build: h6b59ad6_0 + build: h2fd8da2_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h6b59ad6_0.conda - sha256: 
776fdab4f7aae2c2a0500a86726cb35986852d80cf98e4f64c56fadf8d5dd8d2 - md5: 28e547961c714bce30c4341b2b097304 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h2fd8da2_2.conda + sha256: 01aadb30339abdfe9507319d92384a3fe6acbebe7365a8f51ddff430faa42993 + md5: 953d365d72cc02dae51d7abb28bba3d1 depends: - blosc >=1.21.6,<2.0a0 - geos >=3.12.2,<3.12.3.0a0 - geotiff >=1.7.3,<1.8.0a0 - lerc >=4.0.0,<5.0a0 - libarchive >=3.7.4,<3.8.0a0 - - libcurl >=8.9.1,<9.0a0 + - libcurl >=8.10.0,<9.0a0 - libdeflate >=1.21,<1.22.0a0 - - libexpat >=2.6.2,<3.0a0 + - libexpat >=2.6.3,<3.0a0 - libiconv >=1.17,<2.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libkml >=1.3.0,<1.4.0a0 - libpng >=1.6.43,<1.7.0a0 - libspatialite >=5.1.0,<5.2.0a0 - - libsqlite >=3.46.0,<4.0a0 + - libsqlite >=3.46.1,<4.0a0 - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.4.0,<2.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - proj >=9.4.1,<9.5.0a0 - ucrt >=10.0.20348.0 @@ -9472,18 +9569,18 @@ packages: constrains: - libgdal 3.9.2.* license: MIT - license_family: MIT purls: [] - size: 7993361 - timestamp: 1723839990488 + size: 8050659 + timestamp: 1726093888991 - kind: conda name: libgdal-core version: 3.9.2 - build: hba09cee_0 + build: hbd1db40_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hba09cee_0.conda - sha256: 62c646983b15d21ce617390512e167756bb665587f18e9d93187c6abdba714a0 - md5: b008d746ae76241d861f78176bb21e67 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hbd1db40_2.conda + sha256: 8bd783a169fec4c427149d5afbe46094ee3591792213097cbe92f563d289e39d + md5: c4bf6b60486027bac8e8845decc98b93 depends: - __glibc >=2.17,<3.0.a0 - blosc >=1.21.6,<2.0a0 @@ -9493,24 +9590,24 @@ packages: - json-c >=0.17,<0.18.0a0 - lerc >=4.0.0,<5.0a0 - libarchive >=3.7.4,<3.8.0a0 - - libcurl >=8.9.1,<9.0a0 + - libcurl 
>=8.10.0,<9.0a0 - libdeflate >=1.21,<1.22.0a0 - - libexpat >=2.6.2,<3.0a0 - - libgcc-ng >=12 + - libexpat >=2.6.3,<3.0a0 + - libgcc >=13 - libiconv >=1.17,<2.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libkml >=1.3.0,<1.4.0a0 - libpng >=1.6.43,<1.7.0a0 - libspatialite >=5.1.0,<5.2.0a0 - - libsqlite >=3.46.0,<4.0a0 - - libstdcxx-ng >=12 + - libsqlite >=3.46.1,<4.0a0 + - libstdcxx >=13 - libtiff >=4.6.0,<4.7.0a0 - libuuid >=2.38.1,<3.0a0 - libwebp-base >=1.4.0,<2.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - proj >=9.4.1,<9.5.0a0 - xerces-c >=3.2.5,<3.3.0a0 @@ -9519,18 +9616,18 @@ packages: constrains: - libgdal 3.9.2.* license: MIT - license_family: MIT purls: [] - size: 10240822 - timestamp: 1723838979244 + size: 10460443 + timestamp: 1726092341723 - kind: conda name: libgdal-fits version: 3.9.2 - build: h0a0b71e_0 + build: h0a0b71e_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_0.conda - sha256: 0b8864c02ef99805d316e04c47417028e5cbdd65567ec46abae2a0018f3d4174 - md5: 07fedf0779d906450c27a547cc694505 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_2.conda + sha256: f0c04f460af19b62532984b277c26bbc9aa90cdf1fc8d608ced90c9d4cbb86c3 + md5: 27d1bce97d2e6c584d48b223d6a890e1 depends: - cfitsio >=4.4.1,<4.4.2.0a0 - libgdal-core >=3.9 @@ -9539,96 +9636,96 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 497101 - timestamp: 1723842238547 + size: 496865 + timestamp: 1726096554431 - kind: conda name: libgdal-fits version: 3.9.2 - build: h5d197d2_0 + build: h2000d26_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h5d197d2_0.conda - sha256: e7a901faefe99b0057068b187fff40c87974e90e9d09e2ed815528f858ce9e24 - md5: 0c04699c282efe90e2a821326cdd19a8 + url: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_2.conda + sha256: 99e50299f4fc4bd56004d46bc687e2911951af1eb1d789a2575f41ecc27cf466 + md5: 9c3aba4aca7b18a4bf164e140150c257 depends: - __osx >=10.13 - cfitsio >=4.4.1,<4.4.2.0a0 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT - license_family: MIT purls: [] - size: 467917 - timestamp: 1723841156520 + size: 469488 + timestamp: 1726094128863 - kind: conda name: libgdal-fits version: 3.9.2 - build: hdd6600c_0 + build: h2db6552_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-hdd6600c_0.conda - sha256: 20898cb7bf3a456481ff0aa979f7b3b7ef46f666ba3155ff7ae3c6822583e993 - md5: bc8affb58edbc6fddd3e5bc9724f7507 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_2.conda + sha256: 066ced830899567ba036cd65d332d5bf2bd356a0fc641b056ff72157259e9645 + md5: 6587f4912f1ad57f88221edb03346951 depends: - __glibc >=2.17,<3.0.a0 - cfitsio >=4.4.1,<4.4.2.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 license: MIT - license_family: MIT purls: [] - size: 476516 - timestamp: 1723839776557 + size: 477927 + timestamp: 1726093088453 - kind: conda name: libgdal-grib version: 3.9.2 - build: h385febf_0 + build: h9237131_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h385febf_0.conda - sha256: 7652783793614ae7bb6f27591e1625be66eee13ef39d46b716c1498b4643e6ab - md5: 798a4219bfc732715154f93377ce53b5 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_2.conda + sha256: 8bf83845c8f7c6114fb57e3d63d73e66f4e1457997e7b2a804c66170764e45b9 + md5: 00555b58b0bcca46e7b9e1459be4ccf2 depends: - __osx >=10.13 - libaec >=1.1.3,<2.0a0 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT - license_family: 
MIT purls: [] - size: 662915 - timestamp: 1723841284058 + size: 663497 + timestamp: 1726094251462 - kind: conda name: libgdal-grib version: 3.9.2 - build: h5f34788_0 + build: hc3b29a1_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-h5f34788_0.conda - sha256: a90080b794c2e11377dcc804932068f143b8f9bdf3191a5b5747346e3dac538c - md5: dba6d64b956bb475916f385ab8a010b6 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_2.conda + sha256: 05e8868e6686eb862d230c36ab6fa333c8862ebb2c86e1f3f0fa47a37ba31bc9 + md5: d010706c1424156e5cece58f1d52f085 depends: - __glibc >=2.17,<3.0.a0 - libaec >=1.1.3,<2.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 license: MIT - license_family: MIT purls: [] - size: 720550 - timestamp: 1723839829373 + size: 723537 + timestamp: 1726093136248 - kind: conda name: libgdal-grib version: 3.9.2 - build: hd2a089b_0 + build: hd2a089b_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_0.conda - sha256: 2b50535a67d59c944585364dd6ce30fad437015c7b3d156914bd6e7bef7664f0 - md5: 7cdc726554fb14c5408f9481d42428ea + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_2.conda + sha256: 78f25abd0e9917d6a17a621c66536933c1c973b3665c5b11be6ea810ce7ca78d + md5: d0dd12f70a21506eb402cd9ec696fb20 depends: - libaec >=1.1.3,<2.0a0 - libgdal-core >=3.9 @@ -9637,18 +9734,18 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 678811 - timestamp: 1723842377557 + size: 678203 + timestamp: 1726096748590 - kind: conda name: libgdal-hdf4 version: 3.9.2 - build: h430f241_0 + build: h430f241_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_0.conda - sha256: 
1a6f014b5ec5bec23c27704211660fb35c5160700d269170fb8623c860cd0a3b - md5: 93316cdd55ba4b9e8d496eec7184f3eb + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_2.conda + sha256: 3ed2f67582489316304398bd40fb85976c4ce426bbee363cb38baff637c4f335 + md5: 1037903f70eb07936a09418488e8f226 depends: - hdf4 >=4.2.15,<4.2.16.0a0 - libaec >=1.1.3,<2.0a0 @@ -9658,157 +9755,157 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 562497 - timestamp: 1723842519401 + size: 562233 + timestamp: 1726096927503 - kind: conda name: libgdal-hdf4 version: 3.9.2 - build: h86719f3_0 + build: hbfba102_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-h86719f3_0.conda - sha256: 5042ef12eb0b6929c691088cbb32b863f2c0a2a5af7054ca3bef0d35f74bd90f - md5: c69fe5787c0148b1fea4b6b17f56e874 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_2.conda + sha256: c38e3bc65dc35de96c9f8cfee7ae6606f72214714c53db4581f7f5baf2516007 + md5: 2d081b1f5acdcd3dcf7ed52c1d775a6a depends: - __osx >=10.13 - hdf4 >=4.2.15,<4.2.16.0a0 - libaec >=1.1.3,<2.0a0 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT - license_family: MIT purls: [] - size: 590333 - timestamp: 1723841406613 + size: 591694 + timestamp: 1726094364531 - kind: conda name: libgdal-hdf4 version: 3.9.2 - build: ha39a594_0 + build: hd5ecb85_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-ha39a594_0.conda - sha256: e8ab76f289d3d29399bfae46f0c1c7f82b38b18b3b1aa260f0ca13a5a13c7a37 - md5: 4a1f093874e443f96fc5864117dd742a + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_2.conda + sha256: 7404a1108ff5903bf6b3639948114e017826f875aa7ba9c2604587da846bd656 + md5: cda7d2aa5a64fad327f7878cf5de6a00 depends: - __glibc >=2.17,<3.0.a0 - hdf4 >=4.2.15,<4.2.16.0a0 - libaec 
>=1.1.3,<2.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 license: MIT - license_family: MIT purls: [] - size: 577028 - timestamp: 1723839878916 + size: 578505 + timestamp: 1726093181011 - kind: conda name: libgdal-hdf5 version: 3.9.2 - build: h513f0eb_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-h513f0eb_0.conda - sha256: f9fa2e2ec38595d3b615d653aef3cc9ae50ef8bff6df604bcbd58b37d3d06cb2 - md5: 954c616bf75cf43b4fe02483fe5de530 + build: h6283f77_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_2.conda + sha256: f87faa9e8133bf89c53ddc1a8f5abadfb41bea3d9d4aa6ed8563a226968d2e68 + md5: 0e6a02e8fa69e395d90eeeb93fb6a325 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - hdf5 >=1.14.3,<1.14.4.0a0 - - libcxx >=16 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 + - libstdcxx >=13 license: MIT - license_family: MIT purls: [] - size: 599621 - timestamp: 1723841551985 + size: 643055 + timestamp: 1726093235520 - kind: conda name: libgdal-hdf5 version: 3.9.2 - build: ha2ed5f0_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-ha2ed5f0_0.conda - sha256: cf41e68c22b5a69eb64291f72083f3d643430110b8314bc992b1920275389044 - md5: 0b2c3f29eed90f34dd2d633c5a54a047 + build: had131a1_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_2.conda + sha256: de844b6400029027fc3826c1967e0381d06d95e76f88261b3a89d9562f0a827a + md5: 06bee2b910714c12fa510f0cf0aeb2e3 depends: - - __glibc >=2.17,<3.0.a0 - hdf5 >=1.14.3,<1.14.4.0a0 - - libgcc-ng >=12 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 639683 - timestamp: 1723839938366 + size: 
613268 + timestamp: 1726097135142 - kind: conda name: libgdal-hdf5 version: 3.9.2 - build: had131a1_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_0.conda - sha256: 9f937bcf5e6af19413e0c2b516fc7e04b53860f96c2bca3f9c685b68e61b3101 - md5: 47bd857cbe4ce8f4d4386ef123d6b36d + build: hc0c3446_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_2.conda + sha256: 7ac2b60f99639a662234d39403c9ff3360b6f2ac85f909b25ca86d10ff44b244 + md5: ab9f93b55a1d47ac6d87d9e00f836633 depends: + - __osx >=10.13 - hdf5 >=1.14.3,<1.14.4.0a0 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 613331 - timestamp: 1723842672082 + size: 601423 + timestamp: 1726094496440 - kind: conda name: libgdal-jp2openjpeg version: 3.9.2 - build: h2ebfdf0_0 + build: h1b2c38e_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h2ebfdf0_0.conda - sha256: cf7ddcb3550f0042270eb8390ed214fcf95b087394beda229842bce4471cf48f - md5: 3bc2bc4dbef54a3a0d18e8caca7daf0f + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_2.conda + sha256: c01d32450ce2d2144e4e9070eab08abae1046dd7d8a8576de7e490bffd4668e1 + md5: d5c9b358642090b82d9869e9fc846bc0 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 - openjpeg >=2.5.2,<3.0a0 license: MIT - license_family: MIT purls: [] - size: 468035 - timestamp: 1723839975969 + size: 469108 + timestamp: 1726093273314 - kind: conda name: libgdal-jp2openjpeg version: 3.9.2 - build: hc5f35ca_0 + build: hd77bb1f_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hc5f35ca_0.conda - 
sha256: 7e45a734b5a89748162b61d801f6cb9aee34705a2d39cdd57bca1e0095a4fed9 - md5: b189322fcb4879fc4277b4af45c8353c + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_2.conda + sha256: 10102b96411dfea446deb6235ecea536d34b59b81cad311648b87d4249c3dc08 + md5: 45031c24274b3035b4877732e192f392 depends: - __osx >=10.13 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - openjpeg >=2.5.2,<3.0a0 license: MIT - license_family: MIT purls: [] - size: 463991 - timestamp: 1723841680350 + size: 464295 + timestamp: 1726094606697 - kind: conda name: libgdal-jp2openjpeg version: 3.9.2 - build: hed4c6cb_0 + build: hed4c6cb_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_0.conda - sha256: 620caa37b38448311e069f56385512b8eb79c823be5e64e01443e16c6ed296d8 - md5: 4f443cd47ae1e17c9e1e218fe0f8840c + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_2.conda + sha256: 5f980b14f1422f721c9142bdb0b33bfb797fd5b57b094056dd0258ce341bed15 + md5: 0df660649e46d4a71bf229f1f413b847 depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 @@ -9817,233 +9914,254 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 497668 - timestamp: 1723842799814 + size: 497665 + timestamp: 1726097310878 - kind: conda name: libgdal-kea version: 3.9.2 - build: h2b45729_0 + build: h1df15e4_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h2b45729_0.conda - sha256: 4211ea682c8604a34f45a1d56529af745264afa89eed13cd1d9ced114e7dd5b9 - md5: afac39347e003f869972582ea1a36616 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_2.conda + sha256: 213b177c42b0ef5328e5fd4dbe27ed34765e49bb3f4eef38c1c43a9e5a7c9932 + md5: d9bfebd28be759b818d401a201357b87 depends: - __glibc >=2.17,<3.0.a0 - hdf5 >=1.14.3,<1.14.4.0a0 - 
kealib >=1.5.3,<1.6.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libgdal-hdf5 3.9.2.* - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 license: MIT - license_family: MIT purls: [] - size: 480105 - timestamp: 1723840316708 + size: 480381 + timestamp: 1726093574319 - kind: conda name: libgdal-kea version: 3.9.2 - build: h3b8d0bf_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-h3b8d0bf_0.conda - sha256: f053cbb60600d081d3eca8ad4c322da0ffe1de1f78fd483b9d45a1f9af1178f3 - md5: 5971aa24e604517eb3f6312c01268df2 + build: h95b1a77_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_2.conda + sha256: b1a5273b69e75769b7c8eec1caf4420b14b5bbe3d7f0f3ebd26bbbcf63407f8f + md5: 2f388a9b3f0d563f070cd86d3abac645 depends: - - __osx >=10.13 - hdf5 >=1.14.3,<1.14.4.0a0 - kealib >=1.5.3,<1.6.0a0 - - libcxx >=16 - libgdal-core >=3.9 - libgdal-hdf5 3.9.2.* - libkml >=1.3.0,<1.4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 474894 - timestamp: 1723842624126 + size: 518345 + timestamp: 1726098538348 - kind: conda name: libgdal-kea version: 3.9.2 - build: h95b1a77_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_0.conda - sha256: 67be3e486b2af5c48457f631d7fcbd07772c25575686aaf722c7db236f76130b - md5: f574f592b5a150889a71593dc4ed8896 + build: he223473_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_2.conda + sha256: 8687adb1ab399a08f9901dfd6ba22cf38fac91b3b56cafc4efbd846cfaaacc87 + md5: 4f4b68b06d7e4b3fcf5a5999bddb1298 depends: + - __osx >=10.13 - hdf5 >=1.14.3,<1.14.4.0a0 - kealib >=1.5.3,<1.6.0a0 + - libcxx >=17 - libgdal-core >=3.9 - libgdal-hdf5 3.9.2.* - libkml >=1.3.0,<1.4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 
license: MIT - license_family: MIT purls: [] - size: 517965 - timestamp: 1723843742596 + size: 475299 + timestamp: 1726095352836 - kind: conda name: libgdal-netcdf version: 3.9.2 - build: h3127c03_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-h3127c03_0.conda - sha256: cf9cd8c8b47cc1c57446041ce40ef023d4dd28732df1e4140e7bbb45e1dfdd72 - md5: edef4782574a4d9690b2c60fc49e4fa1 + build: h55e78d3_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_2.conda + sha256: 867372f026665f84ada56bd4b8baa649c2e4060d037ab114d7acfcd6b4d6a766 + md5: a020d9b9f0a667981f2cdc9929e85ec0 depends: - - __osx >=10.13 - hdf4 >=4.2.15,<4.2.16.0a0 - hdf5 >=1.14.3,<1.14.4.0a0 - - libcxx >=16 - libgdal-core >=3.9 - libgdal-hdf4 3.9.2.* - libgdal-hdf5 3.9.2.* - libkml >=1.3.0,<1.4.0a0 - libnetcdf >=4.9.2,<4.9.3.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 689033 - timestamp: 1723842779421 + size: 666378 + timestamp: 1726098736520 - kind: conda name: libgdal-netcdf version: 3.9.2 - build: h55e78d3_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_0.conda - sha256: cd4dfc0e49e07e0760c7989e8265f9d3a9e816cb16d6898417ec8702d5e6decf - md5: a64621d72542c38d3dc62632f334a8d2 + build: he83ae23_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_2.conda + sha256: ae605bc31e3b9503b1e6ae733530fdd1cc21721bc6fead69c3dd2066e6202db1 + md5: 049cd27768fd0735bde2237c2f436e88 depends: + - __osx >=10.13 - hdf4 >=4.2.15,<4.2.16.0a0 - hdf5 >=1.14.3,<1.14.4.0a0 + - libcxx >=17 - libgdal-core >=3.9 - libgdal-hdf4 3.9.2.* - libgdal-hdf5 3.9.2.* - libkml >=1.3.0,<1.4.0a0 - libnetcdf >=4.9.2,<4.9.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: 
[] - size: 666629 - timestamp: 1723843888623 + size: 692336 + timestamp: 1726095485705 - kind: conda name: libgdal-netcdf version: 3.9.2 - build: h94e7027_0 + build: hf2d2f32_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-h94e7027_0.conda - sha256: ee089ef3995807d874b2be4f266af5871ebea3c9c11c1b4cc8709955b7a1bf89 - md5: 9b5b3c36cdfa48914a6aefca4fbd21fc + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_2.conda + sha256: d7b5a4b57c6231e452c058e1b7921b904a47f6b11fb3d7db3eb3329d7d9fa34d + md5: 23adb723f7ee5d05b2925a6562cdfc35 depends: - __glibc >=2.17,<3.0.a0 - hdf4 >=4.2.15,<4.2.16.0a0 - hdf5 >=1.14.3,<1.14.4.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libgdal-hdf4 3.9.2.* - libgdal-hdf5 3.9.2.* - libkml >=1.3.0,<1.4.0a0 - libnetcdf >=4.9.2,<4.9.3.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 license: MIT - license_family: MIT purls: [] - size: 731831 - timestamp: 1723840381035 + size: 737551 + timestamp: 1726093631219 - kind: conda name: libgdal-pdf version: 3.9.2 - build: h0fa2cb4_0 + build: h600f43f_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h0fa2cb4_0.conda - sha256: 2ed5b3330a3a32512bf071de64f07fc5c434da2eb14915e97a4e9b9e1afb9793 - md5: e05562e39f9d87beaf8ff75da9b3af52 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_2.conda + sha256: d66d7c3e3758c60df4ac4eb2096085f77c6629303b6f869305249db01e2b204b + md5: f55a803c395491ed48baf6d1fd464eb3 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 - - poppler >=24.8.0,<24.9.0a0 + - libstdcxx >=13 + - poppler + license: MIT + purls: [] + size: 667813 + timestamp: 1726093333981 +- kind: conda + name: libgdal-pdf + version: 3.9.2 + build: h85e1e31_2 + build_number: 2 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_2.conda + sha256: c9e9a11af7fe7dc2eb306300d7972e2d03e5d3abc0945407deb93026d1749c91 + md5: 95b05a267dc00e4f5d3efc2cb56feea7 + depends: + - __osx >=10.13 + - libcxx >=17 + - libgdal-core >=3.9 + - libkml >=1.3.0,<1.4.0a0 + - poppler license: MIT - license_family: MIT purls: [] - size: 664476 - timestamp: 1723840044891 + size: 610106 + timestamp: 1726094743209 - kind: conda name: libgdal-pdf version: 3.9.2 - build: ha1c78db_0 + build: ha1c78db_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_0.conda - sha256: 042bf63099ffb8c292cb9e0f8201832882f797f4eafbe74550108893ed6fbc16 - md5: 7f30514e9cc9d0dac3306ebdcaf8ac96 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_2.conda + sha256: ae9c94c3129296194f005d0d29f63ec72ddb52cf18ed173fc27b43d50f3ca4d1 + md5: 7546b2895c42bbeb90b59e80dcf28419 depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - poppler >=24.8.0,<24.9.0a0 + - poppler - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 625975 - timestamp: 1723842971120 + size: 626553 + timestamp: 1726097532609 - kind: conda - name: libgdal-pdf + name: libgdal-pg version: 3.9.2 - build: ha7d2355_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-ha7d2355_0.conda - sha256: 41b12419c68ad274489b3a20c0bc71620c1992b25b4f2fcf5d30ec6b47eac5a4 - md5: 341eafadf0536f3fba35516182f8d8da + build: h151b34b_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h151b34b_2.conda + sha256: 2d22ad3addcc207e9db4b9a9d4669e24e8b6aaf874eb44b59db46a6912dab148 + md5: 4719aec6235ddee1407f2359c15a597e depends: - - __osx >=10.13 - - libcxx >=16 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - poppler >=24.8.0,<24.9.0a0 + - libpq 
>=16.4,<17.0a0 + - libstdcxx >=13 + - postgresql license: MIT - license_family: MIT purls: [] - size: 609616 - timestamp: 1723841824853 + size: 526700 + timestamp: 1726093379300 - kind: conda name: libgdal-pg version: 3.9.2 - build: h1b48671_0 + build: h7ffd8cf_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h1b48671_0.conda - sha256: 736ad8a0b8841b8ad4ce166499affe1c820ba6ff3c4dc85564f9e6859aca151f - md5: f2d026e614b3ed73afb0b054452f9ca1 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7ffd8cf_2.conda + sha256: de450f862f4595949b7c0e4f9594a50d400a049a43a8c6a4abec2b208e906f30 + md5: 5cd82b1f469ec92d3000f537dd9c9c70 depends: - __osx >=10.13 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - libpq >=16.4,<17.0a0 - postgresql license: MIT - license_family: MIT purls: [] - size: 506451 - timestamp: 1723841961792 + size: 507951 + timestamp: 1726094861491 - kind: conda name: libgdal-pg version: 3.9.2 - build: ha693a0f_0 + build: ha693a0f_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_0.conda - sha256: be39296adbf0a232e7e63a1ae6a079e00aa237648caaf1592e10ab07566ade54 - md5: df007d35fdf928f91ea2c32323769e74 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_2.conda + sha256: 9518ddc91776c520b53000aa45195846777671b21c62ce8d129bdb416f94152c + md5: 84c8cbff3d3aa8e9b3a777cf9577636b depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 @@ -10053,59 +10171,59 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 532068 - timestamp: 1723843119430 + size: 532039 + timestamp: 1726097733407 - kind: conda - name: libgdal-pg + name: libgdal-postgisraster version: 3.9.2 - build: he047751_0 + build: h151b34b_2 + build_number: 2 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-he047751_0.conda - sha256: 495ba816e1bdeab9f324666a60236436af325c27d0bda0169913062ce25ffd39 - md5: ffc6cb208a39a3eb764032d31a14faa8 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h151b34b_2.conda + sha256: 56a0dd9797bb33fe568a65fda62104fe9605b65d1ee0ad15f238a7b1939d5b1d + md5: 3a508686bc81fd6552ffbcbfac942dde depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - libpq >=16.4,<17.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 - postgresql license: MIT - license_family: MIT purls: [] - size: 524778 - timestamp: 1723840095806 + size: 480156 + timestamp: 1726093423575 - kind: conda name: libgdal-postgisraster version: 3.9.2 - build: h1b48671_0 + build: h7ffd8cf_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h1b48671_0.conda - sha256: 6c8858cbfbc2f65196fcaf76db5f6f68a6b6075e95cbe94798437537fb6442f4 - md5: dde648bd5ce1f9039f09c941e80e715e + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7ffd8cf_2.conda + sha256: bbabb16d210723591ef6a20aa743deefd7e704813a459b2959203d967efb084c + md5: 477603447d6359fc22119bd95b49e98e depends: - __osx >=10.13 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - libpq >=16.4,<17.0a0 - postgresql license: MIT - license_family: MIT purls: [] - size: 469429 - timestamp: 1723842125336 + size: 470037 + timestamp: 1726094979700 - kind: conda name: libgdal-postgisraster version: 3.9.2 - build: ha693a0f_0 + build: ha693a0f_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_0.conda - sha256: b2d37e814425ca1bacc7d378ee6d7499379bc06d2145174ce655b0a15deabda0 - md5: 04f104d188bea8c2013fc6b0036a64d3 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_2.conda 
+ sha256: ab42f085618718d70d7dc3bb2bf0eb472516b6a04b92bad8d90e9bdd791e0533 + md5: 96eea459fa2e4f822f29d2352c10c32c depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 @@ -10115,137 +10233,116 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT - purls: [] - size: 504839 - timestamp: 1723843279030 -- kind: conda - name: libgdal-postgisraster - version: 3.9.2 - build: he047751_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-he047751_0.conda - sha256: 98210defde1734576d5c28950f234af28042f3e0fa386e7c1168893bd55d6e04 - md5: 3fa13117dde6d031e11c7dceba70307e - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libgdal-core >=3.9 - - libkml >=1.3.0,<1.4.0a0 - - libpq >=16.4,<17.0a0 - - libstdcxx-ng >=12 - - postgresql - license: MIT - license_family: MIT purls: [] - size: 479176 - timestamp: 1723840147573 + size: 504874 + timestamp: 1726097929736 - kind: conda name: libgdal-tiledb version: 3.9.2 - build: h9d8aadb_0 + build: h4a3bace_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h9d8aadb_0.conda - sha256: 5b1569d11ca306956176cb3efa2b7700ceebfe31a2a3d55a21bfb97f670cdc05 - md5: fe5cc1e3fb1ff4b9ce7f6c14c1a90930 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda + sha256: f3f2ecc68a847ab644c6ba7d9a38dcdd4996393f851ddaef401fe4d90f0ba8e9 + md5: c3fac34ecba2fcf9d5d31a03b975d5a1 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 - - tiledb >=2.25.0,<2.26.0a0 + - libstdcxx >=13 + - tiledb >=2.26.0,<2.27.0a0 license: MIT - license_family: MIT purls: [] - size: 671547 - timestamp: 1723840222090 + size: 681765 + timestamp: 1726093490312 - kind: conda name: libgdal-tiledb version: 3.9.2 - build: ha63beff_0 + build: h6b11327_2 + build_number: 2 subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-ha63beff_0.conda - sha256: 0b81a6b963b417609f584a118304293e09219a48e9447385c3ebb4757baad5f6 - md5: 7dc59f75a0ce5c00575ce07ebb8613d1 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_2.conda + sha256: dda569758f13d5ec8d485397abce0a4b6fceedd1e06d10f031f4c5f644dd2709 + md5: 82799fcd51f47381b7398e9521c1ad95 depends: - __osx >=10.13 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - tiledb >=2.25.0,<2.26.0a0 + - tiledb >=2.26.0,<2.27.0a0 license: MIT - license_family: MIT purls: [] - size: 628861 - timestamp: 1723842320643 + size: 630738 + timestamp: 1726095119594 - kind: conda name: libgdal-tiledb version: 3.9.2 - build: hefbb53f_0 + build: hb8b5d01_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hefbb53f_0.conda - sha256: 42da0b1faed24aabbf1ebe263d0b0ec79698584cb3a8be9d139b2e982b1fe372 - md5: 04a2ecccb1cd6c418242ab7724b75f6f + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_2.conda + sha256: 3d9b00940e17134d0c16c6406b68d7b9afa33e49a8c695f11eb0da7d467f4718 + md5: acc3612fa26fab59d18d4075d12c15b0 depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - tiledb >=2.25.0,<2.26.0a0 + - tiledb >=2.26.0,<2.27.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 628563 - timestamp: 1723843463220 + size: 628075 + timestamp: 1726098183036 - kind: conda name: libgdal-xls version: 3.9.2 - build: h062f1c4_0 + build: h03c987c_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h062f1c4_0.conda - sha256: e073a131c15902e10fdb303b3d038ba5943833f0bce2e85bcdff42f8b1a294c1 - md5: 20770850c35b44c305147e5b502a7ac8 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_2.conda + sha256: 
2215783e6f3aa153a0c2ff3d9dd2c95481a8153fc2563192a14fff8bf4f85d19 + md5: a19adc2adb5cc91a831838fdd69e895d depends: - __glibc >=2.17,<3.0.a0 - freexl >=2.0.0,<3.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 license: MIT - license_family: MIT purls: [] - size: 434267 - timestamp: 1723840265117 + size: 434313 + timestamp: 1726093528376 - kind: conda name: libgdal-xls version: 3.9.2 - build: h597966e_0 + build: hc33d192_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-h597966e_0.conda - sha256: 551a31b3ba06ed9f1addf25bc73bfbc81f0ea19458fe0e33a0c1c0b6f579eb83 - md5: 5defa44a81597e6c8ae8000b46b604d4 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_2.conda + sha256: af48ecd38ed1b1e0a8a55c7bae5a646f164275f8aba93cc3aaa7939c99b30dcb + md5: 19731e92fa7d594f556519d4b4c40b36 depends: - __osx >=10.13 - freexl >=2.0.0,<3.0a0 - - libcxx >=16 + - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT - license_family: MIT purls: [] - size: 431833 - timestamp: 1723842473320 + size: 431816 + timestamp: 1726095230562 - kind: conda name: libgdal-xls version: 3.9.2 - build: hd0e23a6_0 + build: hd0e23a6_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_0.conda - sha256: 3627ab430a898aec0f8228b6172b1f89d9e4458935b9704a9f3d389238af8c25 - md5: 3060a72f5eca3c4dc3878b35215eb425 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_2.conda + sha256: 9dc2e89ab5d598d20f29ebda3a90655b18c2ae7b088b01bd36ac9e19fb0d7df5 + md5: 7f4f627d1a972d3b98d6441b21c17db4 depends: - freexl >=2.0.0,<3.0a0 - libgdal-core >=3.9 @@ -10254,10 +10351,9 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT - license_family: MIT purls: [] - size: 466107 - timestamp: 1723843600322 + size: 466183 + timestamp: 1726098349141 - kind: 
conda name: libgfortran version: 5.0.0 @@ -10274,21 +10370,40 @@ packages: purls: [] size: 110106 timestamp: 1707328956438 +- kind: conda + name: libgfortran + version: 14.1.0 + build: h69a702a_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda + sha256: ed77f04f873e43a26e24d443dd090631eedc7d0ace3141baaefd96a123e47535 + md5: 591e631bc1ae62c64f2ab4f66178c097 + depends: + - libgfortran5 14.1.0 hc5f4f2c_1 + constrains: + - libgfortran-ng ==14.1.0=*_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 52142 + timestamp: 1724801872472 - kind: conda name: libgfortran-ng version: 14.1.0 - build: h69a702a_0 + build: h69a702a_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_0.conda - sha256: ef624dacacf97b2b0af39110b36e2fd3e39e358a1a6b7b21b85c9ac22d8ffed9 - md5: f4ca84fbd6d06b0a052fb2d5b96dde41 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda + sha256: a2dc35cb7f87bb5beebf102d4085574c6a740e1df58e743185d4434cc5e4e0ae + md5: 16cec94c5992d7f42ae3f9fa8b25df8d depends: - - libgfortran5 14.1.0 hc5f4f2c_0 + - libgfortran 14.1.0 h69a702a_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 49893 - timestamp: 1719538933879 + size: 52212 + timestamp: 1724802086021 - kind: conda name: libgfortran5 version: 13.2.0 @@ -10310,20 +10425,37 @@ packages: - kind: conda name: libgfortran5 version: 14.1.0 - build: hc5f4f2c_0 + build: hc5f4f2c_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_0.conda - sha256: a67d66b1e60a8a9a9e4440cee627c959acb4810cb182e089a4b0729bfdfbdf90 - md5: 6456c2620c990cd8dde2428a27ba0bc5 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda + sha256: 
c40d7db760296bf9c776de12597d2f379f30e890b9ae70c1de962ff2aa1999f6 + md5: 10a0cef64b784d6ab6da50ebca4e984d depends: - - libgcc-ng >=14.1.0 + - libgcc >=14.1.0 constrains: - - libgfortran-ng 14.1.0 + - libgfortran 14.1.0 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 1457561 - timestamp: 1719538909168 + size: 1459939 + timestamp: 1724801851300 +- kind: conda + name: libgl + version: 1.7.0 + build: ha4b6fd6_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_0.conda + sha256: 993f3bfe04e16c58fceab108bf54f1522ff93a657a22a4ced8c56658001d55fa + md5: 3deca8c25851196c28d1c84dd4ae9149 + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_0 + - libglx 1.7.0 ha4b6fd6_0 + license: LicenseRef-libglvnd + purls: [] + size: 132746 + timestamp: 1723473216625 - kind: conda name: libglib version: 2.80.3 @@ -10392,21 +10524,52 @@ packages: purls: [] size: 3674504 timestamp: 1723209150363 +- kind: conda + name: libglvnd + version: 1.7.0 + build: ha4b6fd6_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_0.conda + sha256: ce35ceca19110ba9d27cb0058e55c62ea0489b3dfad76d016df2d0bf4f027998 + md5: e46b5ae31282252e0525713e34ffbe2b + depends: + - __glibc >=2.17,<3.0.a0 + license: LicenseRef-libglvnd + purls: [] + size: 129500 + timestamp: 1723473188457 +- kind: conda + name: libglx + version: 1.7.0 + build: ha4b6fd6_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_0.conda + sha256: 72ba2a55de3d8902b40359433bbc51f50574067eaf2ae4081a2347d3735e30bb + md5: b470cc353c5b852e0d830e8d5d23e952 + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_0 + - xorg-libx11 >=1.8.9,<2.0a0 + license: LicenseRef-libglvnd + purls: [] + size: 79343 + timestamp: 1723473207891 - kind: conda name: libgomp version: 14.1.0 - build: h77fa898_0 + build: h77fa898_1 + build_number: 1 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - sha256: 7699df61a1f6c644b3576a40f54791561f2845983120477a16116b951c9cdb05 - md5: ae061a5ed5f05818acdf9adab72c146d + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda + sha256: c96724c8ae4ee61af7674c5d9e5a3fbcf6cd887a40ad5a52c99aa36f1d4f9680 + md5: 23c255b008c4f2ae008f81edcabaca89 depends: - _libgcc_mutex 0.1 conda_forge license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 456925 - timestamp: 1719538796073 + size: 460218 + timestamp: 1724801743478 - kind: conda name: libgoogle-cloud version: 2.28.0 @@ -10776,12 +10939,12 @@ packages: - kind: conda name: libkml version: 1.3.0 - build: h538826c_1020 - build_number: 1020 + build: h538826c_1021 + build_number: 1021 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1020.conda - sha256: 2f20949d50302bddfd4b6c9bb2cd91a02c97ce5a36fab552f2eacad53a71c113 - md5: fddbd8a22ee5700bc07e978e25c10ef1 + url: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda + sha256: 81a6096a2db500f0c3527ae59398eacca0634c3381559713ab28022d711dd3bd + md5: 431ec3b40b041576811641e2d643954e depends: - libexpat >=2.6.2,<3.0a0 - libzlib >=1.3.1,<2.0a0 @@ -10792,49 +10955,49 @@ packages: license: BSD-3-Clause license_family: BSD purls: [] - size: 1655764 - timestamp: 1720690303546 + size: 1651104 + timestamp: 1724667610262 - kind: conda name: libkml version: 1.3.0 - build: hbbc8833_1020 - build_number: 1020 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hbbc8833_1020.conda - sha256: 5bd19933cb3790ec8632c11fa67c25d82654bea6c2bc4f51f8bcd8b122ae96c8 - md5: 6d76c5822cb38bc1ab5a06565c6cf626 + build: h9ee1731_1021 + build_number: 1021 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libkml-1.3.0-h9ee1731_1021.conda + sha256: dba3732e9a3b204e5af01c5ddba8630f4a337693b1c5375c2981a88b580116bd + 
md5: b098eeacf7e78f09c8771f5088b97bbb depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 + - libcxx >=17 - libexpat >=2.6.2,<3.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - libzlib >=1.3.1,<2.0a0 - uriparser >=0.9.8,<1.0a0 license: BSD-3-Clause license_family: BSD purls: [] - size: 395723 - timestamp: 1720690222714 + size: 286877 + timestamp: 1724667518323 - kind: conda name: libkml version: 1.3.0 - build: hfcbc525_1020 - build_number: 1020 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libkml-1.3.0-hfcbc525_1020.conda - sha256: 20dec455f668ab2527d6a20204599253ac0b2d4d0325e4a1ce2316850128cc3e - md5: 055d429f351b79c0a7b7d7e39ff45b28 + build: hf539b9f_1021 + build_number: 1021 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda + sha256: 721c3916d41e052ffd8b60e77f2da6ee47ff0d18babfca48ccf93606f1e0656a + md5: e8c7620cc49de0c6a2349b6dd6e39beb depends: - - __osx >=10.13 - - libcxx >=16 + - __glibc >=2.17,<3.0.a0 - libexpat >=2.6.2,<3.0a0 + - libgcc-ng >=13 + - libstdcxx-ng >=13 - libzlib >=1.3.1,<2.0a0 - uriparser >=0.9.8,<1.0a0 license: BSD-3-Clause license_family: BSD purls: [] - size: 285823 - timestamp: 1720690426491 + size: 402219 + timestamp: 1724667059411 - kind: conda name: liblapack version: 3.9.0 @@ -11141,65 +11304,65 @@ packages: - kind: conda name: libparquet version: 17.0.0 - build: ha915800_8_cpu - build_number: 8 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_8_cpu.conda - sha256: 485c7787e97a4ade061c8ca77a93c58d2329f61843e8e5c3782d62a5171e7450 - md5: d1f5e781ecd7a985e576b6c824bf1183 + build: h39682fd_13_cpu + build_number: 13 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda + sha256: 3c63b7391275cf6cf2a18d2dba3c30c16dd9d210373d206675e342b084cccdf4 + md5: 49c60a8dc089d8127b9368e9eb6c1a77 depends: - - libarrow 17.0.0 h6e8cf4f_8_cpu + - __glibc >=2.17,<3.0.a0 + - 
libarrow 17.0.0 h8d2e343_13_cpu + - libgcc >=13 + - libstdcxx >=13 - libthrift >=0.20.0,<0.20.1.0a0 - openssl >=3.3.1,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: [] - size: 805336 - timestamp: 1723788013786 + size: 1189824 + timestamp: 1725214804075 - kind: conda name: libparquet version: 17.0.0 - build: haa1307c_8_cpu - build_number: 8 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-haa1307c_8_cpu.conda - sha256: 982504e37b8176e9b6e6ca984ce1f5946d5b2d6ae4a63ca04309668cdd0cb2e7 - md5: 7a1e06213539848b4d4b624a0f6307b8 + build: ha915800_13_cpu + build_number: 13 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_13_cpu.conda + sha256: 8cf6d193600b4dd6cb1a8fbdea168ef6bddbf8ca1ee57d08ce6992df71a62670 + md5: 30b08e672c5dcd827ce7b44f01f4821e depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h8756180_8_cpu - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libarrow 17.0.0 h29daf90_13_cpu - libthrift >=0.20.0,<0.20.1.0a0 - openssl >=3.3.1,<4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: [] - size: 1171833 - timestamp: 1723787669273 + size: 805417 + timestamp: 1725215420059 - kind: conda name: libparquet version: 17.0.0 - build: hf1b0f52_8_cpu - build_number: 8 + build: hf1b0f52_13_cpu + build_number: 13 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_8_cpu.conda - sha256: 751656c1e2a516e50eb9df838784569543c4c5613d64689baefc4dc5d0111698 - md5: cb4b2b2048ad593a660a4415e8a3c9d5 + url: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_13_cpu.conda + sha256: f4b5b4e32cc6ffed205594b8db0764b34b896d4080473f271ff893ca44b872e9 + md5: 303a154bbc5ce01673f6b83cf20da30a depends: - __osx >=10.13 - - libarrow 17.0.0 h2952479_8_cpu + - libarrow 17.0.0 ha60c65e_13_cpu - 
libcxx >=17 - libthrift >=0.20.0,<0.20.1.0a0 - openssl >=3.3.1,<4.0a0 license: Apache-2.0 license_family: APACHE purls: [] - size: 926034 - timestamp: 1723788527100 + size: 925660 + timestamp: 1725215237883 - kind: conda name: libpciaccess version: '0.18' @@ -11264,44 +11427,47 @@ packages: - kind: conda name: libpq version: '16.4' - build: h4501773_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h4501773_0.conda - sha256: 78a8a32ad19db17433154c9b8e46dc3cb4d959468e1bb6efdf6713064e0fff85 - md5: 335a29dcc53a5eebe256614aec21a7ef + build: h2d7952a_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda + sha256: f7a425b8bc94a541f9c43120734305705ffaa3054470e49fbdea0f166fc3f371 + md5: 7e3173fd1299939a02ebf9ec32aa77c4 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=13 - openssl >=3.3.1,<4.0a0 license: PostgreSQL purls: [] - size: 2331318 - timestamp: 1723137187682 + size: 2510669 + timestamp: 1724948449731 - kind: conda name: libpq version: '16.4' - build: h482b261_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h482b261_0.conda - sha256: ee0b6da5888020a9f200e83da1a4c493baeeb1d339ed7edd9ca5e01c7110628b - md5: 0f74c5581623f860e7baca042d9d7139 + build: h75a757a_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h75a757a_1.conda + sha256: 161d92de944fefc60414b44f1672d2917dac1e5996f9363635301589b5ee0a94 + md5: 3316ac3fbb20afd3e2a18d6c4264885f depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - krb5 >=1.21.3,<1.22.0a0 - - libgcc-ng >=12 - openssl >=3.3.1,<4.0a0 license: PostgreSQL purls: [] - size: 2485441 - timestamp: 1723136722236 + size: 2340921 + timestamp: 1724948593326 - kind: conda name: libpq version: '16.4' - build: hab9416b_0 + build: hab9416b_1 + build_number: 1 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_0.conda - sha256: 8c69f0a12b0586561443ffaa1576fe3111b658ee84da677441edbfd41a460cc4 - md5: da3b250050e7c8888fc3b94b004df87e + url: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_1.conda + sha256: cc3adc6165e65fa3eabf48219e22bf33f7afe98369f88c5ba0629b4958b61067 + md5: 6b8e08902d6f4d581e42f9862ba2bc2a depends: - krb5 >=1.21.3,<1.22.0a0 - openssl >=3.3.1,<4.0a0 @@ -11310,8 +11476,8 @@ packages: - vc14_runtime >=14.29.30139 license: PostgreSQL purls: [] - size: 3464306 - timestamp: 1723137790847 + size: 3498610 + timestamp: 1724949283448 - kind: conda name: libprotobuf version: 4.25.3 @@ -11492,48 +11658,48 @@ packages: timestamp: 1720347819147 - kind: conda name: libsodium - version: 1.0.18 - build: h36c2ea0_1 - build_number: 1 + version: 1.0.20 + build: h4ab18f5_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2 - sha256: 53da0c8b79659df7b53eebdb80783503ce72fb4b10ed6e9e05cc0e9e4207a130 - md5: c3788462a6fbddafdb413a9f9053e58d + url: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda + sha256: 0105bd108f19ea8e6a78d2d994a6d4a8db16d19a41212070d2d1d48a63c34161 + md5: a587892d3c13b6621a6091be690dbca2 depends: - - libgcc-ng >=7.5.0 + - libgcc-ng >=12 license: ISC purls: [] - size: 374999 - timestamp: 1605135674116 + size: 205978 + timestamp: 1716828628198 - kind: conda name: libsodium - version: 1.0.18 - build: h8d14728_1 - build_number: 1 + version: 1.0.20 + build: hc70643c_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.18-h8d14728_1.tar.bz2 - sha256: ecc463f0ab6eaf6bc5bd6ff9c17f65595de6c7a38db812222ab8ffde0d3f4bc2 - md5: 5c1fb45b5e2912c19098750ae8a32604 + url: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda + sha256: 7bcb3edccea30f711b6be9601e083ecf4f435b9407d70fc48fbcf9e5d69a0fc6 + md5: 198bb594f202b205c7d18b936fa4524f depends: - - 
vc >=14.1,<15.0a0 - - vs2015_runtime >=14.16.27012 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: ISC purls: [] - size: 713431 - timestamp: 1605135918736 + size: 202344 + timestamp: 1716828757533 - kind: conda name: libsodium - version: 1.0.18 - build: hbcb3906_1 - build_number: 1 + version: 1.0.20 + build: hfdf4475_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.18-hbcb3906_1.tar.bz2 - sha256: 2da45f14e3d383b4b9e3a8bacc95cd2832aac2dbf9fbc70d255d384a310c5660 - md5: 24632c09ed931af617fe6d5292919cab + url: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.20-hfdf4475_0.conda + sha256: d3975cfe60e81072666da8c76b993af018cf2e73fe55acba2b5ba0928efaccf5 + md5: 6af4b059e26492da6013e79cbcb4d069 + depends: + - __osx >=10.13 license: ISC purls: [] - size: 528765 - timestamp: 1605135849110 + size: 210249 + timestamp: 1716828641383 - kind: conda name: libspatialite version: 5.1.0 @@ -11620,50 +11786,51 @@ packages: timestamp: 1722338108366 - kind: conda name: libsqlite - version: 3.46.0 - build: h1b8f9f3_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.0-h1b8f9f3_0.conda - sha256: 63af1a9e3284c7e4952364bafe7267e41e2d9d8bcc0e85a4ea4b0ec02d3693f6 - md5: 5dadfbc1a567fe6e475df4ce3148be09 - depends: - - __osx >=10.13 - - libzlib >=1.2.13,<2.0a0 - license: Unlicense - purls: [] - size: 908643 - timestamp: 1718050720117 -- kind: conda - name: libsqlite - version: 3.46.0 + version: 3.46.1 build: h2466b09_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.0-h2466b09_0.conda - sha256: 662bd7e0d63c5b8c31cca19b91649e798319b93568a2ba8d1375efb91eeb251b - md5: 951b0a3a463932e17414cd9f047fa03d + url: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.1-h2466b09_0.conda + sha256: ef83f90961630bc54a95e48062b05cf9c9173a822ea01784288029613a45eea4 + md5: 8a7c1ad01f58623bfbae8d601db7cf3b depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 - 
vc14_runtime >=14.29.30139 license: Unlicense purls: [] - size: 876677 - timestamp: 1718051113874 + size: 876666 + timestamp: 1725354171439 +- kind: conda + name: libsqlite + version: 3.46.1 + build: h4b8f8c9_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.1-h4b8f8c9_0.conda + sha256: 1d075cb823f0cad7e196871b7c57961d669cbbb6cd0e798bf50cbf520dda65fb + md5: 84de0078b58f899fc164303b0603ff0e + depends: + - __osx >=10.13 + - libzlib >=1.3.1,<2.0a0 + license: Unlicense + purls: [] + size: 908317 + timestamp: 1725353652135 - kind: conda name: libsqlite - version: 3.46.0 - build: hde9e2c9_0 + version: 3.46.1 + build: hadc24fc_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - sha256: daee3f68786231dad457d0dfde3f7f1f9a7f2018adabdbb864226775101341a8 - md5: 18aa975d2094c34aef978060ae7da7d8 + url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda + sha256: 9851c049abafed3ee329d6c7c2033407e2fc269d33a75c071110ab52300002b0 + md5: 36f79405ab16bf271edb55b213836dac depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libzlib >=1.3.1,<2.0a0 license: Unlicense purls: [] - size: 865346 - timestamp: 1718050628718 + size: 865214 + timestamp: 1725353659783 - kind: conda name: libssh2 version: 1.11.0 @@ -11716,78 +11883,100 @@ packages: purls: [] size: 259556 timestamp: 1685837820566 +- kind: conda + name: libstdcxx + version: 14.1.0 + build: hc0a3c3a_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda + sha256: 44decb3d23abacf1c6dd59f3c152a7101b7ca565b4ef8872804ceaedcc53a9cd + md5: 9dbb9699ea467983ba8a4ba89b08b066 + depends: + - libgcc 14.1.0 h77fa898_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 3892781 + timestamp: 1724801863728 - kind: conda name: libstdcxx-ng version: 14.1.0 - build: 
hc0a3c3a_0 + build: h4852527_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-hc0a3c3a_0.conda - sha256: 88c42b388202ffe16adaa337e36cf5022c63cf09b0405cf06fc6aeacccbe6146 - md5: 1cb187a157136398ddbaae90713e2498 + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda + sha256: a2dc44f97290740cc187bfe94ce543e6eb3c2ea8964d99f189a1d8c97b419b8c + md5: bd2598399a70bb86d8218e95548d735e depends: - - libgcc-ng 14.1.0 h77fa898_0 + - libstdcxx 14.1.0 hc0a3c3a_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 3881307 - timestamp: 1719538923443 + size: 52219 + timestamp: 1724801897766 - kind: conda name: libthrift version: 0.20.0 - build: h87f9345_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h87f9345_0.conda - sha256: 349e22e681f84e25513fd45c059cf0a860d66dc17b9034d2e8bf1ad85e052168 - md5: 4e157195a0dec016d880ab19fd002ff3 + build: h0e7cc3e_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda + sha256: 3e70dfda31a3ce28310c86cc0001f20abb78c917502e12c94285a1337fe5b9f0 + md5: d0ed81c4591775b70384f4cc78e05cd1 depends: - - libcxx >=16 + - __glibc >=2.17,<3.0.a0 - libevent >=2.1.12,<2.1.13.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.2.1,<4.0a0 + - libgcc-ng >=13 + - libstdcxx-ng >=13 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.1,<4.0a0 license: Apache-2.0 license_family: APACHE purls: [] - size: 323451 - timestamp: 1711309231954 + size: 417404 + timestamp: 1724652349098 - kind: conda name: libthrift version: 0.20.0 - build: ha2b3283_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-ha2b3283_0.conda - sha256: 5d9c7caea0624039c5188f5512f4b8d37739939f1b7e516a31de9105b355e97d - md5: dcce79df24f6e6c136bec7b1a1e24a35 + build: h75589b3_1 + build_number: 1 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h75589b3_1.conda + sha256: a1f40fcb9970fbfd6d0b825841b4127cf7dd7c54199d0b49bdbcd838b66f3b7a + md5: c20b01aa07ece86a237c580f7ba56923 depends: + - __osx >=10.13 + - libcxx >=17 - libevent >=2.1.12,<2.1.13.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.2.1,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.1,<4.0a0 license: Apache-2.0 license_family: APACHE purls: [] - size: 613588 - timestamp: 1711309345316 + size: 324391 + timestamp: 1724657549149 - kind: conda name: libthrift version: 0.20.0 - build: hb90f79a_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-hb90f79a_0.conda - sha256: 9b965ef532946382b21de7dc046e9a5ad4ed50160ca9c422bd7f0ac8c8549a64 - md5: 9ce07c1750e779c9d4cc968047f78b0d + build: hbe90ef8_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-hbe90ef8_1.conda + sha256: 77f92cbacb886f671fdf0bc2fac13f423ba442d0c3171ce3e573ed05f5c8980e + md5: e9f49c00773250da4f622694b7f83f25 depends: - libevent >=2.1.12,<2.1.13.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.2.1,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.1,<4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: [] - size: 409480 - timestamp: 1711308803685 + size: 612714 + timestamp: 1724653005481 - kind: conda name: libtiff version: 4.6.0 @@ -11972,28 +12161,31 @@ packages: - kind: conda name: libxcb version: '1.16' - build: h0dc2134_0 + build: h00291cd_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h0dc2134_0.conda - sha256: c64277f586b716d5c34947e7f2783ef0d24f239a136bc6a024e854bede0389a9 - md5: 07e80289d4ba724f37b4b6f001f88fbe + url: 
https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h00291cd_1.conda + sha256: 2cd6b74fa4b3ef9a3fe7f92271eb34346af673509aa86739e9f04bf72015f841 + md5: c989b18131ab79fdc67e42473d53d545 depends: + - __osx >=10.13 - pthread-stubs - xorg-libxau >=1.0.11,<2.0a0 - xorg-libxdmcp license: MIT license_family: MIT purls: [] - size: 322676 - timestamp: 1693089168477 + size: 323886 + timestamp: 1724419422116 - kind: conda name: libxcb version: '1.16' - build: hcd874cb_0 + build: h013a479_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-hcd874cb_0.conda - sha256: 3b1f3b04baa370cfb1c350cfa829e6236519df5f03e3f57ea2cb2eb044eb8616 - md5: 7c1217d3b075f195ab17370f2d550f5d + url: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-h013a479_1.conda + sha256: abae56e12a4c62730b899fdfb82628a9ac171c4ce144fc9f34ae024957a82a0e + md5: f0b599acdc82d5bc7e3b105833e7c5c8 depends: - m2w64-gcc-libs - m2w64-gcc-libs-core @@ -12003,26 +12195,28 @@ packages: license: MIT license_family: MIT purls: [] - size: 989932 - timestamp: 1693089470750 + size: 989459 + timestamp: 1724419883091 - kind: conda name: libxcb version: '1.16' - build: hd590300_0 + build: hb9d3cd8_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hd590300_0.conda - sha256: 7180375f37fd264bb50672a63da94536d4abd81ccec059e932728ae056324b3a - md5: 151cba22b85a989c2d6ef9633ffee1e4 + url: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda + sha256: 33aa5fc997468b07ab3020b142eacc5479e4e2c2169f467b20ab220f33dd08de + md5: 3601598f0db0470af28985e3e7ad0158 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=13 - pthread-stubs - xorg-libxau >=1.0.11,<2.0a0 - xorg-libxdmcp license: MIT license_family: MIT purls: [] - size: 394932 - timestamp: 1693088990429 + size: 395570 + timestamp: 1724419104778 - kind: conda name: libxcrypt version: 4.4.36 @@ -12289,11 +12483,12 @@ packages: - kind: 
conda name: llvmlite version: 0.43.0 - build: py312h1f7db74_0 + build: py312h1f7db74_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_0.conda - sha256: a1489f318bf7d0167c79c3646a59b81636917712f4c6f43814cf15571dfdfa87 - md5: 5570862384bf307f15c36ed69dd9fa97 + url: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_1.conda + sha256: 77e37e8b6223d185e1a3a1dfda5c5d9eb940e4935d06de3bab74c881b69ac873 + md5: 570a33dbbfdb2f497cac407f41a8e1b7 depends: - libzlib >=1.3.1,<2.0a0 - python >=3.12,<3.13.0a0 @@ -12306,20 +12501,22 @@ packages: license_family: BSD purls: - pkg:pypi/llvmlite?source=hash-mapping - size: 17121144 - timestamp: 1718324901094 + size: 17112697 + timestamp: 1725305550641 - kind: conda name: llvmlite version: 0.43.0 - build: py312h9c5d478_0 + build: py312h374181b_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h9c5d478_0.conda - sha256: 02a2a1896e80ec8c17c186b120fc021d10b24682a810200f62a9aaacdc988463 - md5: b7a63a49d7a0664376b27cd4f052a888 + url: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda + sha256: b260285b29834f9b003e2928d778c19b8ed0ca1aff5aa8aa7ec8f21f9b23c2e4 + md5: ed6ead7e9ab9469629c6cfb363b5c6e2 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - libllvm14 >=14.0.6,<14.1.0a0 - - libstdcxx-ng >=12 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -12327,19 +12524,20 @@ packages: license_family: BSD purls: - pkg:pypi/llvmlite?source=hash-mapping - size: 3437153 - timestamp: 1718324460601 + size: 3442782 + timestamp: 1725305160474 - kind: conda name: llvmlite version: 0.43.0 - build: py312hdeb90da_0 + build: py312hcc8fd36_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hdeb90da_0.conda - sha256: 
b5f11398b4c64afd4188ec3e99a5e43fbc54f867c208cdde83eb2a306f1576b6 - md5: fbf9baa6234121382858875dd707d700 + url: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hcc8fd36_1.conda + sha256: 07b9d9ffaed74979836e291aa8e8fe5557bedaa5518c902fee8f240c7ab6c8cb + md5: 089bb036b9d118a2deec62822b015269 depends: - __osx >=10.13 - - libcxx >=16 + - libcxx >=17 - libllvm14 >=14.0.6,<14.1.0a0 - libzlib >=1.3.1,<2.0a0 - python >=3.12,<3.13.0a0 @@ -12348,8 +12546,8 @@ packages: license_family: BSD purls: - pkg:pypi/llvmlite?source=hash-mapping - size: 372855 - timestamp: 1718324612808 + size: 369643 + timestamp: 1725305415971 - kind: conda name: locket version: 1.0.0 @@ -12370,52 +12568,57 @@ packages: - kind: conda name: lz4 version: 4.3.3 - build: py312h03f37cb_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312h03f37cb_0.conda - sha256: 6180f0b0c56fa49064291f76b3e4b1b84f3de58aab1264a94cb200f5f46b0178 - md5: 8465027beab0db69006f744be7fb3fb6 + build: py312h0608a1d_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h0608a1d_1.conda + sha256: 4ebd0ffbe8ce40924459cb3bf1837b5e22bcf3bd0cb807a51795b460878a400a + md5: 90e9d18bcbfe59ac7d6064a58432f365 depends: - - libgcc-ng >=12 - lz4-c >=1.9.3,<1.10.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/lz4?source=hash-mapping - size: 39409 - timestamp: 1704831318655 + size: 76980 + timestamp: 1725090008004 - kind: conda name: lz4 version: 4.3.3 - build: py312h594ca44_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h594ca44_0.conda - sha256: c1b699016e3334bf2c475a60c84b31770dd3e4b2cc5eb4b1a0ecf2689cb7d57e - md5: cc004e8b6d7a876f0d541d864d14064c + build: py312h83408cd_1 + build_number: 1 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h83408cd_1.conda + sha256: b270abcdf268fc3d2e6d1d5b2fb0837f52485be7b6259fd834d6c3810f43e986 + md5: cf60d8882f24daa555d28c44364f33f2 depends: + - __osx >=10.13 - lz4-c >=1.9.3,<1.10.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/lz4?source=hash-mapping - size: 77168 - timestamp: 1704831874480 + size: 36337 + timestamp: 1725089680758 - kind: conda name: lz4 version: 4.3.3 - build: py312h904eaf1_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h904eaf1_0.conda - sha256: d6761866789708afc396897fe505c52056917d00bb5745f91aa22b4dd8af8d07 - md5: 3d5b987b4090e9dc8ad65c0d536d34ce + build: py312hb3f7f12_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda + sha256: ea8151b4f55fa1f9b8d2220c7193c91f545aa15d44415cddbac9ea1f8782c117 + md5: b99d90ef4e77acdab74828f79705a919 depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - lz4-c >=1.9.3,<1.10.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -12423,8 +12626,8 @@ packages: license_family: BSD purls: - pkg:pypi/lz4?source=hash-mapping - size: 36016 - timestamp: 1704831559047 + size: 39432 + timestamp: 1725089587134 - kind: conda name: lz4-c version: 1.9.4 @@ -12622,7 +12825,7 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/mapclassify?source=compressed-mapping + - pkg:pypi/mapclassify?source=hash-mapping size: 56342 timestamp: 1723589782579 - kind: conda @@ -12646,32 +12849,38 @@ packages: - kind: conda name: markupsafe version: 2.1.5 - build: py312h41838bb_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312h41838bb_0.conda - sha256: 8dc8f31f78d00713300da000b6ebaa1943a17c112f267de310d5c3d82950079c - md5: c4a9c25c09cef3901789ca818d9beb10 + 
build: py312h4389bb4_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312h4389bb4_1.conda + sha256: e0445364902a4c0ab45b6683a09459b574466198f4ad81919bae4cd291e75208 + md5: 79843153b0fa98a7e63b9d9ed525596b depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - jinja2 >=3.0.0 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 25742 - timestamp: 1706900456837 + size: 29136 + timestamp: 1724959968176 - kind: conda name: markupsafe version: 2.1.5 - build: py312h98912ed_0 + build: py312h66e93f0_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h98912ed_0.conda - sha256: 273d8efd6c089c534ccbede566394c0ac1e265bfe5d89fe76e80332f3d75a636 - md5: 6ff0b9582da2d4a74a1f9ae1f9ce2af6 + url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h66e93f0_1.conda + sha256: 5c88cd6e19437015de16bde30dd25791aca63ac9cbb8d66b65f365ecff1b235b + md5: 80b79ce0d3dc127e96002dfdcec0a2a5 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 constrains: @@ -12680,30 +12889,29 @@ packages: license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 26685 - timestamp: 1706900070330 + size: 26772 + timestamp: 1724959630484 - kind: conda name: markupsafe version: 2.1.5 - build: py312he70551f_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312he70551f_0.conda - sha256: f8690a3c87e2e96cebd434a829bb95cac43afe6c439530b336dc3452fe4ce4af - md5: 4950a739b19edaac1ed29ca9474e49ac + build: py312hb553811_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312hb553811_1.conda + sha256: 2382cc541f3bbe912180861754aceb2ed180004e361a7c66ac2b1a71a7c2fba8 + 
md5: 2b9fc64d656299475c648d7508e14943 depends: + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - jinja2 >=3.0.0 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 29060 - timestamp: 1706900374745 + size: 25414 + timestamp: 1724959688117 - kind: conda name: matplotlib version: 3.9.2 @@ -12720,8 +12928,7 @@ packages: - tornado >=5 license: PSF-2.0 license_family: PSF - purls: - - pkg:pypi/matplotlib?source=compressed-mapping + purls: [] size: 9152 timestamp: 1723760944640 - kind: conda @@ -12740,8 +12947,7 @@ packages: - tornado >=5 license: PSF-2.0 license_family: PSF - purls: - - pkg:pypi/matplotlib?source=compressed-mapping + purls: [] size: 8747 timestamp: 1723759696471 - kind: conda @@ -12759,8 +12965,7 @@ packages: - tornado >=5 license: PSF-2.0 license_family: PSF - purls: - - pkg:pypi/matplotlib?source=compressed-mapping + purls: [] size: 8799 timestamp: 1723759810727 - kind: conda @@ -12792,7 +12997,7 @@ packages: license: PSF-2.0 license_family: PSF purls: - - pkg:pypi/matplotlib?source=compressed-mapping + - pkg:pypi/matplotlib?source=hash-mapping size: 7905104 timestamp: 1723759753087 - kind: conda @@ -12826,7 +13031,7 @@ packages: license: PSF-2.0 license_family: PSF purls: - - pkg:pypi/matplotlib?source=compressed-mapping + - pkg:pypi/matplotlib?source=hash-mapping size: 7904910 timestamp: 1723759675614 - kind: conda @@ -12859,7 +13064,7 @@ packages: license: PSF-2.0 license_family: PSF purls: - - pkg:pypi/matplotlib?source=compressed-mapping + - pkg:pypi/matplotlib?source=hash-mapping size: 7747918 timestamp: 1723760858160 - kind: conda @@ -13017,50 +13222,51 @@ packages: timestamp: 1716561374449 - kind: conda name: msgpack-python - version: 1.0.8 - build: py312h2492b07_0 + version: 1.1.0 + build: py312h68727a3_0 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.8-py312h2492b07_0.conda - sha256: 3761f57834ae20e49b4665b341057cf8ac2641d6f87e76d3d5cc615bc0dae8cc - md5: 0df463266eaaa1b8a35f8fd26368c1a1 + url: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda + sha256: 4bc53333774dea1330643b7e23aa34fd6880275737fc2e07491795872d3af8dd + md5: 5c9b020a3f86799cdc6115e55df06146 depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: Apache purls: - pkg:pypi/msgpack?source=hash-mapping - size: 103653 - timestamp: 1715670786268 + size: 105271 + timestamp: 1725975182669 - kind: conda name: msgpack-python - version: 1.0.8 - build: py312hc3c9ca0_0 + version: 1.1.0 + build: py312hc5c4d5f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.0.8-py312hc3c9ca0_0.conda - sha256: d48287594d4c4a9323deb2f505c52f53f757981d4d16b22231f8831bd22349bf - md5: 87927f3f0037c19ac74ac3f820c26bd1 + url: https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.1.0-py312hc5c4d5f_0.conda + sha256: d12f400fb57eef8aae8a8b2a3c4d4917130b9bd8f08a631646e3bf4a6551bb54 + md5: 3448a4ca65790764c2f8d44d5f917f84 depends: - __osx >=10.13 - - libcxx >=16 + - libcxx >=17 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: Apache purls: - pkg:pypi/msgpack?source=hash-mapping - size: 91736 - timestamp: 1715670793021 + size: 90548 + timestamp: 1725975181015 - kind: conda name: msgpack-python - version: 1.0.8 + version: 1.1.0 build: py312hd5eb7cc_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.0.8-py312hd5eb7cc_0.conda - sha256: 080fad891281a38ff05d417ed4aa59b093d7c5fbb232cd3498dc100baacd8e44 - md5: 83bdd6554fb4bf25195c0dacabeeebf3 + url: 
https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda + sha256: 3fd45d9c0830e931e34990cb90e88ba53cc7f89fce69fc7d1a8289639d363e85 + md5: ff4f1e63a6438a06d1ab259936e5c2ac depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -13071,8 +13277,8 @@ packages: license_family: Apache purls: - pkg:pypi/msgpack?source=hash-mapping - size: 88758 - timestamp: 1715671314905 + size: 88169 + timestamp: 1725975418157 - kind: conda name: msys2-conda-epoch version: '20160418' @@ -13121,57 +13327,57 @@ packages: timestamp: 1600387789153 - kind: conda name: mypy - version: 1.11.1 - build: py312h41a817b_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.1-py312h41a817b_0.conda - sha256: e708912d61d10ec7da9f7ccfe9738defd65b7bc1d9729b2751e057a8779835e4 - md5: c02a9d2e6400a518b0ebe53197462428 + version: 1.11.2 + build: py312h4389bb4_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda + sha256: 31d0292518c3c3090af632bc06ffa5f331fa6969ad9ae219e6505a6b2219d0af + md5: dd2e469b2e2f8a1cc4ae749a7ed44b7f depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - mypy_extensions >=1.0.0 - psutil >=4.0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - typing_extensions >=4.1.0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - - pkg:pypi/mypy?source=compressed-mapping - size: 17039274 - timestamp: 1722473734957 + - pkg:pypi/mypy?source=hash-mapping + size: 8560830 + timestamp: 1724602058839 - kind: conda name: mypy - version: 1.11.1 - build: py312h4389bb4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.1-py312h4389bb4_0.conda - sha256: c08d2cba9995ae455dba2f09c70f0a0715558acd2d6e91eeb129856fae8e47dc - md5: bba2403f2dbc616963403df9de652727 + version: 1.11.2 + build: py312h66e93f0_0 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda + sha256: aadb78145f51b5488806c86e5954cc3cb19b03f2297a464b2a2f27c0340332a8 + md5: ea315027e648236653f27d3d1ae893f6 depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=13 - mypy_extensions >=1.0.0 - psutil >=4.0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - typing_extensions >=4.1.0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - - pkg:pypi/mypy?source=compressed-mapping - size: 8525974 - timestamp: 1722473636039 + - pkg:pypi/mypy?source=hash-mapping + size: 17066588 + timestamp: 1724602213195 - kind: conda name: mypy - version: 1.11.1 - build: py312hbd25219_0 + version: 1.11.2 + build: py312hb553811_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.1-py312hbd25219_0.conda - sha256: 0d049236505f4efc5337a5b1e1c177c945ac69c407933e7344a3e7787f09d50f - md5: 5df78c800c06e4a01d1baabc92eea4aa + url: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.2-py312hb553811_0.conda + sha256: 99eced54663f6cf2b8b924f36bc2fc0317075d8bd3c38c47fff55e463687fb04 + md5: 4e22f7fed8b0572fa5d1b12e7a39a570 depends: - __osx >=10.13 - mypy_extensions >=1.0.0 @@ -13182,9 +13388,9 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/mypy?source=compressed-mapping - size: 10541916 - timestamp: 1722473220458 + - pkg:pypi/mypy?source=hash-mapping + size: 10502065 + timestamp: 1724601972090 - kind: conda name: mypy_extensions version: 1.0.0 @@ -13204,13 +13410,12 @@ packages: timestamp: 1675543414256 - kind: conda name: mysql-common - version: 8.3.0 - build: h70512c7_5 - build_number: 5 + version: 9.0.1 + build: h70512c7_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.3.0-h70512c7_5.conda - sha256: 09296629aab020fb131c8256d8683087769c53ce5197ca3a2abe040bfb285d88 - md5: 4b652e3e572cbb3f297e77c96313faea + url: 
https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h70512c7_0.conda + sha256: 4417ba9daf1f818e62e399dc9ab33fcd12741d79d19db0884394cc9c766ae78d + md5: c567b6fa201bc424e84f1e70f7a36095 depends: - __glibc >=2.17,<3.0.a0 - libgcc-ng >=12 @@ -13219,30 +13424,29 @@ packages: license: GPL-2.0-or-later license_family: GPL purls: [] - size: 780145 - timestamp: 1721386057930 + size: 612947 + timestamp: 1723209940114 - kind: conda name: mysql-libs - version: 8.3.0 - build: ha479ceb_5 - build_number: 5 + version: 9.0.1 + build: ha479ceb_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.3.0-ha479ceb_5.conda - sha256: c6e9b0961b6877eda8c300b12a0939c81f403a4eb5c0db802e13130fd5a3a059 - md5: 82776ee8145b9d1fd6546604de4b351d + url: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-ha479ceb_0.conda + sha256: f4bea852a48a2168d2bdb73c9be6e3d0ba30525a7e4f0472e899a0773206a8a9 + md5: 6fd406aef37faad86bd7f37a94fb6f8a depends: - __glibc >=2.17,<3.0.a0 - libgcc-ng >=12 - libstdcxx-ng >=12 - libzlib >=1.3.1,<2.0a0 - - mysql-common 8.3.0 h70512c7_5 + - mysql-common 9.0.1 h70512c7_0 - openssl >=3.3.1,<4.0a0 - zstd >=1.5.6,<1.6.0a0 license: GPL-2.0-or-later license_family: GPL purls: [] - size: 1532137 - timestamp: 1721386157918 + size: 1368619 + timestamp: 1723210027997 - kind: conda name: nbclient version: 0.10.0 @@ -13325,29 +13529,34 @@ packages: - kind: conda name: ncurses version: '6.5' - build: h5846eda_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h5846eda_0.conda - sha256: 6ecc73db0e49143092c0934355ac41583a5d5a48c6914c5f6ca48e562d3a4b79 - md5: 02a888433d165c99bf09784a7b14d900 + build: he02047a_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda + sha256: 6a1d5d8634c1a07913f1c525db6455918cbc589d745fac46d9d6e30340c8731a + md5: 70caf8bb6cf39a0b6b7efc885f51c0fe + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 
license: X11 AND BSD-3-Clause purls: [] - size: 823601 - timestamp: 1715195267791 + size: 889086 + timestamp: 1724658547447 - kind: conda name: ncurses version: '6.5' - build: h59595ed_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - sha256: 4fc3b384f4072b68853a0013ea83bdfd3d66b0126e2238e1d6e1560747aa7586 - md5: fcea371545eda051b6deafb24889fc69 + build: hf036a51_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda + sha256: b0b3180039ef19502525a2abd5833c00f9624af830fd391f851934d57bffb9af + md5: e102bbf8a6ceeaf429deab8032fc8977 depends: - - libgcc-ng >=12 + - __osx >=10.13 license: X11 AND BSD-3-Clause purls: [] - size: 887465 - timestamp: 1715194722503 + size: 822066 + timestamp: 1724658603042 - kind: conda name: nest-asyncio version: 1.6.0 @@ -13403,7 +13612,7 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/nodeenv?source=compressed-mapping + - pkg:pypi/nodeenv?source=hash-mapping size: 34489 timestamp: 1717585382642 - kind: conda @@ -13457,43 +13666,43 @@ packages: timestamp: 1669785313586 - kind: conda name: nss - version: '3.103' - build: h593d115_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/nss-3.103-h593d115_0.conda - sha256: f69c027c056a620f06b5f69c3c2a437cc8768bbcbe48664cfdb46ffee7d7753d - md5: 233bfe41968d6fb04eba9258bb5061ad + version: '3.104' + build: h3135457_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/nss-3.104-h3135457_0.conda + sha256: a5b3fe0367a39edfac92e2cd69426123049257cb6aedf9bba002ea45c70fcdfc + md5: 8cf0f6f72197a4fb10ccb897b30f1731 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - __osx >=10.13 + - libcxx >=17 - libsqlite >=3.46.0,<4.0a0 - - libstdcxx-ng >=12 - libzlib >=1.3.1,<2.0a0 - nspr >=4.35,<5.0a0 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 1976811 - timestamp: 1722554596783 + size: 1859778 + timestamp: 
1725079369298 - kind: conda name: nss - version: '3.103' - build: he7eb89d_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/nss-3.103-he7eb89d_0.conda - sha256: c45b554bbe1b9e52e359845b587a69b35f33dc47ca45a6ce8f4aa44cbb3f5ded - md5: 2a7c2b52e8157c187b5be0ed958a26db + version: '3.104' + build: hd34e28f_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda + sha256: 0beb64ae310a34537c41e43110ebc24352c4319e6348cebe3d8a89b02382212c + md5: 0664e59f6937a660eba9f3d2f9123fa8 depends: - - __osx >=10.13 - - libcxx >=16 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - libsqlite >=3.46.0,<4.0a0 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - nspr >=4.35,<5.0a0 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 1856317 - timestamp: 1722554746154 + size: 1979701 + timestamp: 1725079209552 - kind: conda name: numba version: 0.60.0 @@ -13587,13 +13796,13 @@ packages: timestamp: 1718888811663 - kind: conda name: numba_celltree - version: 0.1.8 + version: 0.2.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.8-pyhd8ed1ab_0.conda - sha256: 4b30d964bf034b41ce14842bf8fa3040b21878c0da1bdec15c5523ab785bc311 - md5: 02b10d14b2e6693519804fe90d41c589 + url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + sha256: 8ef116fc2af9d70afe8123cb4157f5efa55cfd042fd1ef36cad9aab65b36ca5a + md5: e2ed9d4ac5f28671045cd33b2269969a depends: - numba >=0.50 - numpy @@ -13601,72 +13810,72 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/numba-celltree?source=compressed-mapping - size: 33524 - timestamp: 1722763371483 + - pkg:pypi/numba-celltree?source=hash-mapping + size: 33566 + timestamp: 1724401764094 - kind: conda name: numpy - version: 2.0.1 - build: py312h1103770_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.1-py312h1103770_0.conda - sha256: 
0746a37d09036b4164ac14dd1328dd4e449a038383aac1e25e2d5f3a691518da - md5: 9f444595d8d9682891f2f078fc19da43 + version: 2.0.2 + build: py312h49bc9c5_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.2-py312h49bc9c5_0.conda + sha256: 14f0707f73ea97579b8400e55bc394b9ddf7fdbcc87eb8ad9068b6ebbe62b97c + md5: 6be606bcfd79a5958b19a437b16028aa depends: - - __glibc >=2.17,<3.0.a0 - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 - - libgcc-ng >=12 - liblapack >=3.9.0,<4.0a0 - - libstdcxx-ng >=12 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - numpy-base <0a0 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/numpy?source=compressed-mapping - size: 8345605 - timestamp: 1721966364929 + - pkg:pypi/numpy?source=hash-mapping + size: 6889345 + timestamp: 1724749766610 - kind: conda name: numpy - version: 2.0.1 - build: py312h49bc9c5_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.1-py312h49bc9c5_0.conda - sha256: 13b38db57cefbbea0cb6a44a5c75df8010480dc6200eda3491c8d203072d1675 - md5: e7fed4e2639f3a0d58bd8b2164059e8d + version: 2.0.2 + build: py312h58c1407_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.2-py312h58c1407_0.conda + sha256: 8c54907fdc60b5f51bb505f2414e9a75d07e0c56d77651010428de91e8343843 + md5: c705a6295a3946400a0c0893dbec87bb depends: + - __glibc >=2.17,<3.0.a0 - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 + - libgcc-ng >=13 - liblapack >=3.9.0,<4.0a0 + - libstdcxx-ng >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - numpy-base <0a0 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/numpy?source=compressed-mapping - size: 6945867 - timestamp: 1721966986321 + - pkg:pypi/numpy?source=hash-mapping + size: 8383737 + timestamp: 1724749066469 - kind: conda name: 
numpy - version: 2.0.1 - build: py312h8813227_0 + version: 2.0.2 + build: py312he4d506f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.1-py312h8813227_0.conda - sha256: 3f73ed4464e3dc639c875b6cbe86e8095f88afe047bdfdc3d4b4ae120dd830e8 - md5: 7f239fbf9d9355f86529a35af0b24d29 + url: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.2-py312he4d506f_0.conda + sha256: 8f766bf4a1d34b9f0150aee5272484adf52a73083c5749261e1e66cfa0c741a4 + md5: f565ae6749c3757d3cb87a9d2953bed8 depends: - __osx >=10.13 - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 - - libcxx >=16 + - libcxx >=17 - liblapack >=3.9.0,<4.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -13675,9 +13884,9 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/numpy?source=compressed-mapping - size: 7464264 - timestamp: 1721966235928 + - pkg:pypi/numpy?source=hash-mapping + size: 7427254 + timestamp: 1724749204501 - kind: conda name: odc-geo version: 0.4.8 @@ -13761,48 +13970,51 @@ packages: - kind: conda name: openpyxl version: 3.1.5 - build: py312h8847cbe_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h8847cbe_0.conda - sha256: 9ac8a3b0952fad38cbf3218e447d122c7e09f25ff1e36bca97600f3eb21a043c - md5: 0a0a886c69d1caa098e0240849b6da8e + build: py312h710cb58_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda + sha256: 1dd541ef7a1357594c3f4ecb1a0c86f42f58e09f18db8b9099b7bf01b52f07c5 + md5: 69a8838436435f59d72ddcb8dfd24a28 depends: - et_xmlfile + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - - pkg:pypi/openpyxl?source=compressed-mapping - size: 653865 - timestamp: 1723459258669 + - pkg:pypi/openpyxl?source=hash-mapping + size: 695844 + timestamp: 1725461065535 - kind: conda name: openpyxl version: 3.1.5 - build: py312h98912ed_0 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h98912ed_0.conda - sha256: ec72232cb8c6a537f5b8f01cbc68d64486e62505d4cd9b25d5dc029f26db0cf9 - md5: 5265c8fb3517b52a39bf253387d35dcf + build: py312h732d5f6_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h732d5f6_1.conda + sha256: 2ac1cb340b7767aa6b22c2173b6e5f94efcefb4421bb78a4bdd0e32cc3fdfcaa + md5: 5ff403ec5f54e5ab0246ed4aa080b5a1 depends: - et_xmlfile - - libgcc-ng >=12 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - - pkg:pypi/openpyxl?source=compressed-mapping - size: 696433 - timestamp: 1723459244053 + - pkg:pypi/openpyxl?source=hash-mapping + size: 654641 + timestamp: 1725461063935 - kind: conda name: openpyxl version: 3.1.5 - build: py312he70551f_0 + build: py312he70551f_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_0.conda - sha256: 42994cf45c2ce3f463872c4cf069b304142356b103da14cf5c25da9fd444a257 - md5: acde6d93e7518f0eda9fc1d7990e6723 + url: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_1.conda + sha256: e1e33f94b2c18c3904f5fe7deafba092710bb6c104fd8a07b5b8896a696b1164 + md5: dae0fc7ab538dda92fb977c48ab7acd1 depends: - et_xmlfile - python >=3.12,<3.13.0a0 @@ -13813,69 +14025,60 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/openpyxl?source=compressed-mapping - size: 625249 - timestamp: 1723459559708 + - pkg:pypi/openpyxl?source=hash-mapping + size: 627490 + timestamp: 1725461370219 - kind: conda name: openssl - version: 3.3.1 - build: h2466b09_2 - build_number: 2 + version: 3.3.2 + build: h2466b09_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.1-h2466b09_2.conda - sha256: d86c4fa31294ad9068717788197e97e5637e056c82745ffb6d0e88fd1fef1a9d - md5: 375dbc2a4d5a2e4c738703207e8e368b + url: 
https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.2-h2466b09_0.conda + sha256: a45c42f3577294e22ac39ddb6ef5a64fd5322e8a6725afefbf4f2b4109340bf9 + md5: 1dc86753693df5e3326bb8a85b74c589 depends: - ca-certificates - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 - constrains: - - pyopenssl >=22.1 license: Apache-2.0 license_family: Apache purls: [] - size: 8385012 - timestamp: 1721197465883 + size: 8396053 + timestamp: 1725412961673 - kind: conda name: openssl - version: 3.3.1 - build: h4bc722e_2 - build_number: 2 + version: 3.3.2 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - sha256: b294b3cc706ad1048cdb514f0db3da9f37ae3fcc0c53a7104083dd0918adb200 - md5: e1b454497f9f7c1147fdde4b53f1b512 + url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda + sha256: cee91036686419f6dd6086902acf7142b4916e1c4ba042e9ca23e151da012b6d + md5: 4d638782050ab6faa27275bed57e9b4e depends: - __glibc >=2.17,<3.0.a0 - ca-certificates - - libgcc-ng >=12 - constrains: - - pyopenssl >=22.1 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 2895213 - timestamp: 1721194688955 + size: 2891789 + timestamp: 1725410790053 - kind: conda name: openssl - version: 3.3.1 - build: h87427d6_2 - build_number: 2 + version: 3.3.2 + build: hd23fc13_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-h87427d6_2.conda - sha256: 3cb0c05fbfd8cdb9b767396fc0e0af2d78eb4d68592855481254104330d4a4eb - md5: 3f3dbeedbee31e257866407d9dea1ff5 + url: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.2-hd23fc13_0.conda + sha256: 2b75d4b56e45992adf172b158143742daeb316c35274b36f385ccb6644e93268 + md5: 2ff47134c8e292868a4609519b1ea3b6 depends: - __osx >=10.13 - ca-certificates - constrains: - - pyopenssl >=22.1 license: Apache-2.0 license_family: Apache purls: [] - size: 2552939 - timestamp: 1721194674491 + size: 2544654 + timestamp: 
1725410973572 - kind: conda name: orc version: 2.0.2 @@ -14054,103 +14257,102 @@ packages: timestamp: 1715899159343 - kind: conda name: pandas-stubs - version: 2.2.2.240807 + version: 2.2.2.240909 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240807-pyhd8ed1ab_0.conda - sha256: 858fc48f3f9b398d8ca1a4006d637ec3767cb24b5c8c6f669dcaeb64442fa4c4 - md5: f045ee4454edcf41eb489594ed8cef62 + url: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + sha256: 1cf735133060d2c687bc556824709f596e16b0d992fff8c3d16bf6fda7cb92ae + md5: 5139243ed81869ac257a910995650f36 depends: - numpy >=1.26.0 - python >=3.9 - types-pytz >=2022.1.1 license: BSD-3-Clause - license_family: BSD purls: - - pkg:pypi/pandas-stubs?source=compressed-mapping - size: 97850 - timestamp: 1723055218678 + - pkg:pypi/pandas-stubs?source=hash-mapping + size: 98489 + timestamp: 1725975727109 - kind: conda name: pandera - version: 0.20.3 + version: 0.20.4 build: hd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.3-hd8ed1ab_0.conda - sha256: d8f6c4b2e82ea9f3a0ee24080e38cae8c46fccf9c1751b005a10d31b7dcc7873 - md5: c5dccdafb966b0a4a156cba0168379c7 + url: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + sha256: 62f85c709021bb721101e4baddf1bb293077d70a5ef0f17bb3cc3c1325c868ae + md5: 1f5a3edcac6b4ebded70e2bd9871a6a8 depends: - - pandera-base >=0.20.3,<0.20.4.0a0 + - pandera-base >=0.20.4,<0.20.5.0a0 license: MIT license_family: MIT purls: [] - size: 6805 - timestamp: 1721327041967 + size: 6785 + timestamp: 1725408304237 - kind: conda name: pandera-base - version: 0.20.3 + version: 0.20.4 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.3-pyhd8ed1ab_0.conda - sha256: 319948578d3c73411ce3dd63b3b497a38b2e8c0e5559ac6cf75db0ac5a24ee4e - md5: 
736acd845c4fe83600fc44358e62100b + url: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + sha256: 7b23ec2a4cea4fc2adc10828829fcb044c196719d0dd9ca63f4577b1855eb6c8 + md5: c2a59e20377cedcfa4a4dfa98fa7f36e depends: - multimethod <=1.10.0 - numpy >=1.19.0 - packaging >=20.0 - pandas >=1.2.0 - - pydantic + - pydantic >=1.5.0 - python >=3.8 - - typeguard >=3.0.2 + - typeguard - typing_inspect >=0.6.0 - wrapt license: MIT license_family: MIT purls: - pkg:pypi/pandera?source=hash-mapping - size: 148324 - timestamp: 1721327037113 + size: 150012 + timestamp: 1725408301583 - kind: conda name: pandoc - version: 3.2.1 + version: '3.2' build: h57928b3_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2.1-h57928b3_0.conda - sha256: 664dd6bbc54ed3a2c25193573deda2f3fb2c368844ab003906d94aa968dfdb98 - md5: 81b32fef1406927cd9d1ed0245dd333a + url: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2-h57928b3_0.conda + sha256: 20781b4a5ed66b459968f5929a8994473198f0505797382a8eee794f5d9d7c32 + md5: 44c65fee440f2492edb8cb25f5770065 license: GPL-2.0-or-later license_family: GPL purls: [] - size: 25083374 - timestamp: 1719301123869 + size: 24991574 + timestamp: 1715499984220 - kind: conda name: pandoc - version: 3.2.1 + version: '3.2' build: h694c41f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2.1-h694c41f_0.conda - sha256: 48dedb78b6bf95ab5569c9d5d51a1f842dd754f4a9cf5132545d912eecaef391 - md5: 41918abf697bb6544ba287ef5e15cf16 + url: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2-h694c41f_0.conda + sha256: d1a975c8986fd47021ac368dc736f03c47eda8fcf5b1217bad184888747f7336 + md5: e2c54cad9c955434dbfe89fc655e516d license: GPL-2.0-or-later license_family: GPL purls: [] - size: 14103182 - timestamp: 1719300764821 + size: 14038781 + timestamp: 1715499676602 - kind: conda name: pandoc - version: 3.2.1 + version: '3.2' build: ha770c72_0 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2.1-ha770c72_0.conda - sha256: 130bcefaeeb55ed68ea4403d45b21105390292a2e3167779da099e241d713109 - md5: b39b12d3809e4042f832b76192e0e7e8 + url: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2-ha770c72_0.conda + sha256: 418348076c1a39170efb0bdc8a584ddd11e9ed0ff58ccd905488d3f165ca98ba + md5: 8c924f0b7f3e064b1c954a08e7c32fba license: GPL-2.0-or-later license_family: GPL purls: [] - size: 20850791 - timestamp: 1719300679855 + size: 21088628 + timestamp: 1715499623651 - kind: conda name: pandocfilters version: 1.5.0 @@ -14314,15 +14516,15 @@ packages: - kind: conda name: pillow version: 10.4.0 - build: py312h287a98d_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h287a98d_0.conda - sha256: f3bca9472702f32bf85196efbf013e9dabe130776e76c7f81062f18682f33a05 - md5: 59ea71eed98aee0bebbbdd3b118167c7 + build: py312h381445a_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_1.conda + sha256: 0b52e708ac4b72e6e1608de517cd4c8e6517dd525e23163a69bf73c7261399fc + md5: c57e54ae4acca720fb3a44bee93cb5b9 depends: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - - libgcc-ng >=12 - libjpeg-turbo >=3.0.0,<4.0a0 - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.4.0,<2.0a0 @@ -14332,22 +14534,28 @@ packages: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: HPND purls: - pkg:pypi/pillow?source=hash-mapping - size: 42068301 - timestamp: 1719903698022 + size: 42468305 + timestamp: 1726075694989 - kind: conda name: pillow version: 10.4.0 - build: py312h381445a_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_0.conda - sha256: 2c76c1ded20c5199d134ccecab596412510a016218f342914fd85384a850e7ed - md5: cc1e714c3cc43c59d9d0efa228c16364 + build: py312h56024de_1 + 
build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h56024de_1.conda + sha256: a0961e7ff663d4c7a82478ff45fba72a346070f2a017a9b56daff279c0dbb8e2 + md5: 4bd6077376c7f9c1ce33fd8319069e5b depends: + - __glibc >=2.17,<3.0.a0 - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 + - libgcc >=13 - libjpeg-turbo >=3.0.0,<4.0a0 - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.4.0,<2.0a0 @@ -14357,22 +14565,20 @@ packages: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: HPND purls: - pkg:pypi/pillow?source=hash-mapping - size: 42560613 - timestamp: 1719904152461 + size: 42689452 + timestamp: 1726075285193 - kind: conda name: pillow version: 10.4.0 - build: py312hbd70edc_0 + build: py312h683ea77_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312hbd70edc_0.conda - sha256: 38b6e8c63c8ebfd9c8552312cecd385ec7bfad6e5733f5c6b6df0db801ea5f43 - md5: 8d55e92fa6380ac8c245f253b096fefd + url: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312h683ea77_1.conda + sha256: 1e8d489190aa0b4682f52468efe4db46b37e50679c64879696e42578c9a283a4 + md5: fb17ec3065f089dad64d9b597b1e8ce4 depends: - __osx >=10.13 - freetype >=2.12.1,<3.0a0 @@ -14389,27 +14595,28 @@ packages: license: HPND purls: - pkg:pypi/pillow?source=hash-mapping - size: 42081826 - timestamp: 1719903909255 + size: 42329265 + timestamp: 1726075276862 - kind: conda name: pip version: '24.2' - build: pyhd8ed1ab_0 + build: pyh8b19718_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyhd8ed1ab_0.conda - sha256: 15b480571a7a4d896aa187648cce99f98bac3926253f028f228d2e9e1cf7c1e1 - md5: 6721aef6bfe5937abe70181545dd2c51 + url: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda + sha256: 
d820e5358bcb117fa6286e55d4550c60b0332443df62121df839eab2d11c890b + md5: 6c78fbb8ddfd64bcb55b5cbafd2d2c43 depends: - - python >=3.8 + - python >=3.8,<3.13.0a0 - setuptools - wheel license: MIT license_family: MIT purls: - - pkg:pypi/pip?source=compressed-mapping - size: 1238498 - timestamp: 1722451042495 + - pkg:pypi/pip?source=hash-mapping + size: 1237976 + timestamp: 1724954490262 - kind: conda name: pixman version: 0.43.2 @@ -14477,21 +14684,21 @@ packages: timestamp: 1694617398467 - kind: conda name: platformdirs - version: 4.2.2 + version: 4.3.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda - sha256: adc59384cf0b2fc6dc7362840151e8cb076349197a38f7230278252698a88442 - md5: 6f6cf28bf8e021933869bae3f84b8fc9 + url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda + sha256: 3aef5bb863a2db94e47272fd5ec5a5e4b240eafba79ebb9df7a162797cf035a3 + md5: e1a2dfcd5695f0744f1bcd3bbfe02523 depends: - python >=3.8 license: MIT license_family: MIT purls: - pkg:pypi/platformdirs?source=hash-mapping - size: 20572 - timestamp: 1715777739019 + size: 20623 + timestamp: 1725821846879 - kind: conda name: pluggy version: 1.5.0 @@ -14551,41 +14758,46 @@ packages: - kind: conda name: poppler version: 24.08.0 - build: h686f694_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h686f694_0.conda - sha256: a18060dc625c081307f800b3e89b487e375dc9a9fb7b9f0fa093d6e41927614e - md5: 927a6b4a37decb917ce5b52eed2f3333 + build: h47131b8_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda + sha256: b32fe787525236908e21885fef8d77e8ebdbbe6694b2fb89ed799444ebda3178 + md5: 0854b9ff0cc10a1f6f67b0f352b8e75a depends: + - __glibc >=2.17,<3.0.a0 - cairo >=1.18.0,<2.0a0 + - fontconfig >=2.14.2,<3.0a0 + - fonts-conda-ecosystem - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - 
libcurl >=8.9.1,<9.0a0 + - libgcc-ng >=13 - libglib >=2.80.3,<3.0a0 - libiconv >=1.17,<2.0a0 - - libintl >=0.22.5,<1.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libpng >=1.6.43,<1.7.0a0 + - libstdcxx-ng >=13 - libtiff >=4.6.0,<4.7.0a0 - libzlib >=1.3.1,<2.0a0 + - nspr >=4.35,<5.0a0 + - nss >=3.103,<4.0a0 - openjpeg >=2.5.2,<3.0a0 - poppler-data - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: GPL-2.0-only license_family: GPL purls: [] - size: 2379143 - timestamp: 1723083494559 + size: 1907007 + timestamp: 1724659640508 - kind: conda name: poppler version: 24.08.0 - build: h744cbf2_0 + build: h65860a0_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h744cbf2_0.conda - sha256: 5be39a4f1569045a15e46928b67e5459493ae921519888d3a82c1c64dbdc842a - md5: ed41cbc38d3955f7aa121196461dc5d1 + url: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h65860a0_1.conda + sha256: 2a526b86471a539eafe6ad49c5e380fe47a3c8b8b6fbd82125d08e3861028055 + md5: 3fd516e90f0b36d6d47b5a91cf6dd90c depends: - __osx >=10.13 - cairo >=1.18.0,<2.0a0 @@ -14594,7 +14806,7 @@ packages: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - libcurl >=8.9.1,<9.0a0 - - libcxx >=16 + - libcxx >=17 - libglib >=2.80.3,<3.0a0 - libiconv >=1.17,<2.0a0 - libintl >=0.22.5,<1.0a0 @@ -14609,41 +14821,39 @@ packages: license: GPL-2.0-only license_family: GPL purls: [] - size: 1591847 - timestamp: 1723083003580 + size: 1591573 + timestamp: 1724659773322 - kind: conda name: poppler version: 24.08.0 - build: hb0d391f_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-hb0d391f_0.conda - sha256: 78f1163ad49f6745d8086922054c837e2ecce69877dd0efa2a82f3f2b4fc1bdd - md5: cbe41fbbe05b1f78182ced1f0defdf81 + build: h9415970_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h9415970_1.conda + sha256: 
e0066265fefd6954b68f4ab74d133ad49c5ed8c5d4c668e0450ba3e7b9a9501c + md5: 4c7b7a4301314afed48c6544c34399e4 depends: - - __glibc >=2.17,<3.0.a0 - cairo >=1.18.0,<2.0a0 - - fontconfig >=2.14.2,<3.0a0 - - fonts-conda-ecosystem - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - libcurl >=8.9.1,<9.0a0 - - libgcc-ng >=12 - libglib >=2.80.3,<3.0a0 - libiconv >=1.17,<2.0a0 + - libintl >=0.22.5,<1.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libpng >=1.6.43,<1.7.0a0 - - libstdcxx-ng >=12 - libtiff >=4.6.0,<4.7.0a0 - libzlib >=1.3.1,<2.0a0 - - nspr >=4.35,<5.0a0 - - nss >=3.103,<4.0a0 - openjpeg >=2.5.2,<3.0a0 - poppler-data + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: GPL-2.0-only license_family: GPL purls: [] - size: 1911070 - timestamp: 1723082747225 + size: 2355995 + timestamp: 1724660655649 - kind: conda name: poppler-data version: 0.4.12 @@ -14661,15 +14871,16 @@ packages: - kind: conda name: postgresql version: '16.4' - build: h9b73963_0 + build: h4b98a8f_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h9b73963_0.conda - sha256: 40e822f4747e976aabaece3ef80bf57e070c800b2bece7b3c641ded3c409b34f - md5: f0c03ea499c8b5cf05d0a9ed28c1bee0 + url: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h4b98a8f_1.conda + sha256: 2399f6b2eea2af0bd37a6c71fe9055a83248fbbd438cde14d3057dabff39a279 + md5: 1286c495eb0b5817270acdf5b4144b03 depends: - __osx >=10.13 - krb5 >=1.21.3,<1.22.0a0 - - libpq 16.4 h4501773_0 + - libpq 16.4 h75a757a_1 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.1,<4.0a0 @@ -14678,21 +14889,22 @@ packages: - tzdata license: PostgreSQL purls: [] - size: 4627424 - timestamp: 1723137318399 + size: 4593109 + timestamp: 1724948725869 - kind: conda name: postgresql version: '16.4' - build: ha8faf9a_0 + build: hb2eb5c0_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-ha8faf9a_0.conda - sha256: 
ed6e79408a4557bf6bd765fcb4772c2835964b04d287ac93799c78182a10b35f - md5: 58af4d5fc019a678745f6bff7ddee225 + url: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda + sha256: 7b6c307722ff7acaa26f04a19c124b5548e16a8097576709d911ef7123e2fbaf + md5: 1aaec5dbae29b3f0a2c20eeb84e9e38a depends: - __glibc >=2.17,<3.0.a0 - krb5 >=1.21.3,<1.22.0a0 - - libgcc-ng >=12 - - libpq 16.4 h482b261_0 + - libgcc >=13 + - libpq 16.4 h2d7952a_1 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.1,<4.0a0 @@ -14701,19 +14913,20 @@ packages: - tzdata license: PostgreSQL purls: [] - size: 5345160 - timestamp: 1723136740934 + size: 5323539 + timestamp: 1724948476169 - kind: conda name: postgresql version: '16.4' - build: hd835ec0_0 + build: hd835ec0_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_0.conda - sha256: 89ff014b89329213d53e8c36bf232718c2a73454025280cc1792cabb5dcc3399 - md5: 7cbd1adbfcc15719126cebca9cb7fdb5 + url: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_1.conda + sha256: dfbb2a5979e904d48482a68197227905ef99e4e2e290c989cf07ca7283273a94 + md5: 9dc48367ebcf896988c7df870ec6cba3 depends: - krb5 >=1.21.3,<1.22.0a0 - - libpq 16.4 hab9416b_0 + - libpq 16.4 hab9416b_1 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.1,<4.0a0 @@ -14722,17 +14935,18 @@ packages: - vc14_runtime >=14.29.30139 license: PostgreSQL purls: [] - size: 18717056 - timestamp: 1723137923606 + size: 18666036 + timestamp: 1724949389398 - kind: conda name: pre-commit version: 3.8.0 - build: pyha770c72_0 + build: pyha770c72_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_0.conda - sha256: 2363c8706ca3b2a3385b09e33f639f6b66e4fa8d00a21c3dea4d934472a96e85 - md5: 1822e87a5d357f79c6aab871d86fb062 + url: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda + 
sha256: c2b964c86b2cd00e494093d751b1f8697b3c4bf924ff70648387af161444cc82 + md5: 004cff3a7f6fafb0a041fb575de85185 depends: - cfgv >=2.0.0 - identify >=1.0.0 @@ -14743,9 +14957,9 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/pre-commit?source=compressed-mapping - size: 180036 - timestamp: 1722604788932 + - pkg:pypi/pre-commit?source=hash-mapping + size: 180526 + timestamp: 1725795837882 - kind: conda name: proj version: 9.4.1 @@ -14857,11 +15071,12 @@ packages: - kind: conda name: psutil version: 6.0.0 - build: py312h4389bb4_0 + build: py312h4389bb4_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_0.conda - sha256: c9ed9457fa4c4900b7f2fc5e28493bdd3885acb823ed48c01dae59f043a65ad8 - md5: 86fd428b42be7495c93d0ff837adfc9e + url: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda + sha256: fc16b9c6a511a6c127d7d6b973771be14266aaa8a3069abbf0b70727e1ab8394 + md5: 6847f7375068f9ef7d22ca7cb1055f31 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -14872,34 +15087,37 @@ packages: license_family: BSD purls: - pkg:pypi/psutil?source=hash-mapping - size: 509298 - timestamp: 1719275243368 + size: 506867 + timestamp: 1725738313194 - kind: conda name: psutil version: 6.0.0 - build: py312h9a8786e_0 + build: py312h66e93f0_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h9a8786e_0.conda - sha256: d629363515df957507411fd24db2a0635ac893e5d60b2ee2f656b53be9c70b1d - md5: 1aeffa86c55972ca4e88ac843eccedf2 + url: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda + sha256: fae2f63dd668ab2e7b2813f826508ae2c83f43577eeef5acf304f736b327c5be + md5: 76706c73e315d21bede804514a39bccf depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause license_family: BSD purls: - 
pkg:pypi/psutil?source=hash-mapping - size: 493452 - timestamp: 1719274737481 + size: 493021 + timestamp: 1725738009896 - kind: conda name: psutil version: 6.0.0 - build: py312hbd25219_0 + build: py312hb553811_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hbd25219_0.conda - sha256: 06e949079497cf8e1c9e253b77be709ec0c11816656814e1ad857ac5cbbea65b - md5: db086d71e9be086313110a670b6d549f + url: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda + sha256: ac711ad735ebfe9bc01d0d2c11ef56fe3f5a4e2499774b5e46eac44749adece7 + md5: b2395d1f7ceb250b13b65bd13c5558a2 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -14908,8 +15126,8 @@ packages: license_family: BSD purls: - pkg:pypi/psutil?source=hash-mapping - size: 499307 - timestamp: 1719274858092 + size: 499530 + timestamp: 1725737996873 - kind: conda name: pthread-stubs version: '0.4' @@ -15024,8 +15242,7 @@ packages: - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: APACHE - purls: - - pkg:pypi/pyarrow?source=compressed-mapping + purls: [] size: 25826 timestamp: 1722487375945 - kind: conda @@ -15048,8 +15265,7 @@ packages: - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: APACHE - purls: - - pkg:pypi/pyarrow?source=compressed-mapping + purls: [] size: 26280 timestamp: 1722489225383 - kind: conda @@ -15072,8 +15288,7 @@ packages: - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: APACHE - purls: - - pkg:pypi/pyarrow?source=compressed-mapping + purls: [] size: 25693 timestamp: 1722487649034 - kind: conda @@ -15098,7 +15313,7 @@ packages: license: Apache-2.0 license_family: APACHE purls: - - pkg:pypi/pyarrow?source=compressed-mapping + - pkg:pypi/pyarrow?source=hash-mapping size: 4095434 timestamp: 1722487335874 - kind: conda @@ -15124,7 +15339,7 @@ packages: license: Apache-2.0 license_family: APACHE purls: - - pkg:pypi/pyarrow?source=compressed-mapping + - 
pkg:pypi/pyarrow?source=hash-mapping size: 3503799 timestamp: 1722488098978 - kind: conda @@ -15150,7 +15365,7 @@ packages: license: Apache-2.0 license_family: APACHE purls: - - pkg:pypi/pyarrow?source=compressed-mapping + - pkg:pypi/pyarrow?source=hash-mapping size: 4645745 timestamp: 1722487499158 - kind: conda @@ -15190,88 +15405,88 @@ packages: timestamp: 1711811634025 - kind: conda name: pydantic - version: 2.8.2 + version: 2.9.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.8.2-pyhd8ed1ab_0.conda - sha256: 5a877153f7eaaab9724db5b64366a35e346007c9c104c1d6a6042f83b2f4f0df - md5: 539a038a24a959662df1fcaa2cfc5c3e + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda + sha256: 9064ec63d676d83452a6a07cb92d95ebfa02b5016841956ce55e324c45e012ee + md5: 5309e66d385d7367364e838764ad2ac4 depends: - - annotated-types >=0.4.0 - - pydantic-core 2.20.1 + - annotated-types >=0.6.0 + - pydantic-core 2.23.3 - python >=3.7 - typing-extensions >=4.6.1 license: MIT license_family: MIT purls: - pkg:pypi/pydantic?source=hash-mapping - size: 292538 - timestamp: 1720293163725 + size: 300605 + timestamp: 1725908662611 - kind: conda name: pydantic-core - version: 2.20.1 - build: py312h2615798_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.20.1-py312h2615798_0.conda - sha256: 2dfe7ebca8de86e35e4231000936bcf14b56e6d9a3c09f4abc91ab090050c5ca - md5: bf5efeeab4b8c0259119a4281b5d3531 + version: 2.23.3 + build: py312h12e396e_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.3-py312h12e396e_0.conda + sha256: 1894e49665d343cbb5c2ae54107f2bf9077f481cdf6df40e851d14347bd9e07c + md5: 4052762306d758de4d61c7cc71edfe2b depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - typing-extensions >=4.6.0,!=4.7.0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime 
>=14.29.30139 + constrains: + - __glibc >=2.17 license: MIT license_family: MIT purls: - pkg:pypi/pydantic-core?source=hash-mapping - size: 1570939 - timestamp: 1720042355599 + size: 1615644 + timestamp: 1725735931378 - kind: conda name: pydantic-core - version: 2.20.1 - build: py312ha47ea1c_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.20.1-py312ha47ea1c_0.conda - sha256: d82efcb45a6958af050851f76544fd35a6968fc50f613a2b24dd3467fab7a8d7 - md5: 8e095b6acd6405ea0da845d191302faf + version: 2.23.3 + build: py312h2615798_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.3-py312h2615798_0.conda + sha256: 74904399f23430b6d9d8b2eacb3e12257a2c2ab61c1d324e0206517c90c71cb9 + md5: 2e7eac3f63413617bc6f4e7018305707 depends: - - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - typing-extensions >=4.6.0,!=4.7.0 - constrains: - - __osx >=10.13 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/pydantic-core?source=hash-mapping - size: 1529185 - timestamp: 1720041729851 + size: 1569350 + timestamp: 1725736732484 - kind: conda name: pydantic-core - version: 2.20.1 - build: py312hf008fa9_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.20.1-py312hf008fa9_0.conda - sha256: adf117d3289c8dd97ffdb3076bc488217fedd02f3d96d35cc971f4de33460602 - md5: 8cc8f335b7e355558854236d86b2bea4 + version: 2.23.3 + build: py312h669792a_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.3-py312h669792a_0.conda + sha256: 38f7ef2eb082a75cafbcc37d05e285780858dfff64004d80afdd03a04448a88b + md5: 6599b550ea3dae7abbeda4f44e78750c depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - typing-extensions >=4.6.0,!=4.7.0 constrains: - - __glibc >=2.17 + - __osx >=10.13 license: MIT 
license_family: MIT purls: - pkg:pypi/pydantic-core?source=hash-mapping - size: 1612296 - timestamp: 1720041586700 + size: 1535653 + timestamp: 1725736002889 - kind: conda name: pygments version: 2.18.0 @@ -15292,11 +15507,12 @@ packages: - kind: conda name: pyobjc-core version: 10.3.1 - build: py312he77c50b_0 + build: py312hab44e94_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312he77c50b_0.conda - sha256: d3f056d2fb9fb2838b79672b17f2b1305218c1e95fbf05f0b02ac1eca513082d - md5: fb6108445d2e14c5aa1f79fa97aab8ed + url: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312hab44e94_1.conda + sha256: 2cd47e3b011640115066d71a5266c825ab85854c1e5fff0fef2f24318f8c63e8 + md5: a2259b39321aef5c0548de366cc9b861 depends: - __osx >=10.13 - libffi >=3.4,<4.0a0 @@ -15307,16 +15523,17 @@ packages: license_family: MIT purls: - pkg:pypi/pyobjc-core?source=hash-mapping - size: 496184 - timestamp: 1718171987828 + size: 499240 + timestamp: 1725739564809 - kind: conda name: pyobjc-framework-cocoa version: 10.3.1 - build: py312he77c50b_0 + build: py312hab44e94_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312he77c50b_0.conda - sha256: aa99ea58ad2f8ade894c11f5be2e9e28860efe527f0994532c84bef20eef249a - md5: 58a1af350ed69dd0d9e43c652c9b35b6 + url: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312hab44e94_1.conda + sha256: 0b6a7635467fb54d094fdeca82406ca6ecdffafc69a943066affe73431d505d5 + md5: 2cd451bd736cd2273b766b709c5ab7fa depends: - __osx >=10.13 - libffi >=3.4,<4.0a0 @@ -15327,21 +15544,24 @@ packages: license_family: MIT purls: - pkg:pypi/pyobjc-framework-cocoa?source=hash-mapping - size: 375734 - timestamp: 1718645660119 + size: 377479 + timestamp: 1725875154490 - kind: conda name: pyogrio version: 0.9.0 - build: py312h43b3a95_0 - subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312h43b3a95_0.conda - sha256: 9dc89062437d698a1060644c96c9800bacb12370ddf416f75d2fda87afde5dea - md5: 1a22b21b82d6d134a06440dbaf46d1d7 + build: py312h5aa26c2_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h5aa26c2_2.conda + sha256: e9682d1664e09c97f536060696896f6c0e9e84914a635351d406da836140266d + md5: 8b4325775ed711941bbf6b8c5ad2b5e9 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - gdal - - libcxx >=16 - - libgdal >=3.9.0,<3.10.0a0 + - libgcc >=13 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 + - libstdcxx >=13 - numpy - packaging - python >=3.12,<3.13.0a0 @@ -15350,21 +15570,23 @@ packages: license_family: MIT purls: - pkg:pypi/pyogrio?source=hash-mapping - size: 664241 - timestamp: 1718696098770 + size: 733154 + timestamp: 1725519798030 - kind: conda name: pyogrio version: 0.9.0 - build: py312h8ad7a51_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h8ad7a51_0.conda - sha256: 4f2cc106c738be0076c11b487546bd448aa8fca7f19d2b0f54afd8fa2ee0b7d1 - md5: f4d2803818632b2175fa58de7f653901 + build: py312hcd3578f_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312hcd3578f_2.conda + sha256: 58a8a7fbfd8e1d4637c89ad1d8ae2dc5874d8cf92d7593c5c006cc05efe73d78 + md5: 70b136b0fdb2751e6c2a0bfd00ef2f6b depends: + - __osx >=10.13 - gdal - - libgcc-ng >=12 - - libgdal >=3.9.0,<3.10.0a0 - - libstdcxx-ng >=12 + - libcxx >=17 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 - numpy - packaging - python >=3.12,<3.13.0a0 @@ -15373,19 +15595,21 @@ packages: license_family: MIT purls: - pkg:pypi/pyogrio?source=hash-mapping - size: 734215 - timestamp: 1718696048397 + size: 664111 + timestamp: 1725519932877 - kind: conda name: pyogrio version: 0.9.0 - build: py312hd215820_0 + build: py312hd215820_2 + build_number: 2 subdir: 
win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_0.conda - sha256: 069f0782954c9000928f155c97965b51535eddee3feba87c2d86462056c50847 - md5: 389c4ae48840c02e25aadcfd6def0673 + url: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_2.conda + sha256: 3adf95bc11ab4352155e7273a8ba235230f85a13b0506dd143e0504fb292a40b + md5: 3656489c0c4514e769f26dea21c3dc21 depends: - gdal - - libgdal >=3.9.0,<3.10.0a0 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 - numpy - packaging - python >=3.12,<3.13.0a0 @@ -15397,79 +15621,79 @@ packages: license_family: MIT purls: - pkg:pypi/pyogrio?source=hash-mapping - size: 887820 - timestamp: 1718696645436 + size: 887063 + timestamp: 1725520279515 - kind: conda name: pyparsing - version: 3.1.2 + version: 3.1.4 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda - sha256: 06c77cb03e5dde2d939b216c99dd2db52ea93a4c7c599f3882f136005c359c7b - md5: b9a4dacf97241704529131a0dfc0494f + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + sha256: 8714a83f1aeac278b3eb33c7cb880c95c9a5924e7a5feeb9e87e7d0837afa085 + md5: 4d91352a50949d049cf9714c8563d433 depends: - python >=3.6 license: MIT license_family: MIT purls: - pkg:pypi/pyparsing?source=hash-mapping - size: 89455 - timestamp: 1709721146886 + size: 90129 + timestamp: 1724616224956 - kind: conda name: pyproj version: 3.6.1 - build: py312h01329cd_8 - build_number: 8 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h01329cd_8.conda - sha256: 4fa89049d4973861b416bb821de487a6b3d9dc3044d82830d4661865dd22a23d - md5: ca4d166cb45c07aff3bee9091866c7dd + build: py312h6f27134_9 + build_number: 9 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_9.conda + sha256: 42c2eaa5d75ad0e184e8b957ae07c9d9438fa611c8b896a9a387aaafe10dc3b2 + 
md5: a7414c734b08e74d22581a9a07686301 depends: - - __glibc >=2.17,<3.0.a0 - certifi - - libgcc-ng >=12 - proj >=9.4.1,<9.5.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - - pkg:pypi/pyproj?source=compressed-mapping - size: 546924 - timestamp: 1722370579043 + - pkg:pypi/pyproj?source=hash-mapping + size: 740323 + timestamp: 1725436440016 - kind: conda name: pyproj version: 3.6.1 - build: py312h6f27134_8 - build_number: 8 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_8.conda - sha256: 6d99757a71db359d787a0d7cd996e358a9ac9d86fdd8d0ea18ea1be6acc16b49 - md5: bd1f2ca75444327254d6b6c43312088a + build: py312h9211aeb_9 + build_number: 9 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda + sha256: 9525e8363d4b5509c7520f8b47a7011c0d1e8f66b294f17679093a3d15af086c + md5: 173afeb0d112c854fd1a9fcac4b5cce3 depends: + - __glibc >=2.17,<3.0.a0 - certifi + - libgcc >=13 - proj >=9.4.1,<9.5.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - - pkg:pypi/pyproj?source=compressed-mapping - size: 736072 - timestamp: 1722371076693 + - pkg:pypi/pyproj?source=hash-mapping + size: 547934 + timestamp: 1725436149519 - kind: conda name: pyproj version: 3.6.1 - build: py312ha320102_8 - build_number: 8 + build: py312haf32e09_9 + build_number: 9 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312ha320102_8.conda - sha256: daffe51c7c393cbb8e883b955aada0656ba99f4c0f908316b11fc6b6682a0a42 - md5: 75b178fbc1a36b1f45b1bb2cdcff8381 + url: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312haf32e09_9.conda + sha256: eaf202c9484db91c2e7c851dcbdbb900c5c68126a088f962ccc9605604dd8e69 + md5: 
87547186b89fe0263b5ce2d638b3f958 depends: - __osx >=10.13 - certifi @@ -15479,18 +15703,18 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/pyproj?source=compressed-mapping - size: 481247 - timestamp: 1722370669062 + - pkg:pypi/pyproj?source=hash-mapping + size: 483738 + timestamp: 1725436121238 - kind: conda name: pyside6 version: 6.7.2 - build: py312h2ee7485_2 - build_number: 2 + build: py312h2ee7485_3 + build_number: 3 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_2.conda - sha256: 1576947d4c0dcf5fb04497aa350bae0cde45e09ae568d7760ff303a20620ad68 - md5: ee646594cba1942d42cb3bd280243fd2 + url: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_3.conda + sha256: bf437de4f749eba0e77e53faf0a49abb6ebb1c83787636cd2993a0d4c2a558a7 + md5: 18ab45e45cd109d6223e5f89f06ecb0a depends: - libclang13 >=18.1.8 - libxml2 >=2.12.7,<3.0a0 @@ -15503,25 +15727,25 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: LGPL-3.0-only - license_family: LGPL purls: - - pkg:pypi/pyside6?source=compressed-mapping - size: 9267851 - timestamp: 1723107384817 + - pkg:pypi/pyside6?source=hash-mapping + - pkg:pypi/shiboken6?source=hash-mapping + size: 9237358 + timestamp: 1726118783905 - kind: conda name: pyside6 version: 6.7.2 - build: py312hb5137db_2 - build_number: 2 + build: py312h91f0f75_3 + build_number: 3 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312hb5137db_2.conda - sha256: d270c55f5874867c2c258fcc54bda2bb9d03f2e9f2e184c3edd92a71f4deca2f - md5: 99889d0c042cc4dfb9a758619d487282 + url: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312h91f0f75_3.conda + sha256: 797e68f35d400abcb3eedc3ed10df1b2ca3d0c405d98721c821978c2f0666996 + md5: 19dba13e88e2d4800860edc05dda1c6a depends: - __glibc >=2.17,<3.0.a0 - libclang13 >=18.1.8 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc >=13 + - libstdcxx >=13 - libxml2 >=2.12.7,<3.0a0 - libxslt 
>=1.1.39,<2.0a0 - python >=3.12,<3.13.0a0 @@ -15529,11 +15753,11 @@ packages: - qt6-main 6.7.2.* - qt6-main >=6.7.2,<6.8.0a0 license: LGPL-3.0-only - license_family: LGPL purls: - - pkg:pypi/pyside6?source=compressed-mapping - size: 10639049 - timestamp: 1723107283396 + - pkg:pypi/pyside6?source=hash-mapping + - pkg:pypi/shiboken6?source=hash-mapping + size: 10600770 + timestamp: 1726118924165 - kind: conda name: pysocks version: 1.7.1 @@ -15575,13 +15799,13 @@ packages: timestamp: 1661604969727 - kind: conda name: pytest - version: 8.3.2 + version: 8.3.3 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.2-pyhd8ed1ab_0.conda - sha256: 72c84a3cd9fe82835a88e975fd2a0dbf2071d1c423ea4f79e7930578c1014873 - md5: e010a224b90f1f623a917c35addbb924 + url: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda + sha256: e99376d0068455712109d233f5790458ff861aeceb458bfda74e353338e4d815 + md5: c03d61f31f38fdb9facf70c29958bf7a depends: - colorama - exceptiongroup >=1.0.0rc8 @@ -15593,11 +15817,10 @@ packages: constrains: - pytest-faulthandler >=2 license: MIT - license_family: MIT purls: - - pkg:pypi/pytest?source=compressed-mapping - size: 257671 - timestamp: 1721923749407 + - pkg:pypi/pytest?source=hash-mapping + size: 258293 + timestamp: 1725977334143 - kind: conda name: pytest-cov version: 5.0.0 @@ -15844,21 +16067,20 @@ packages: timestamp: 1723823139725 - kind: conda name: pytz - version: '2024.1' + version: '2024.2' build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - sha256: 1a7d6b233f7e6e3bbcbad054c8fd51e690a67b129a899a056a5e45dd9f00cb41 - md5: 3eeeeb9e4827ace8c0c1419c85d590ad + url: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + sha256: 81c16d9183bb4a6780366ce874e567ee5fc903722f85b2f8d1d9479ef1dafcc9 + md5: 260009d03c9d5c0f111904d851f053dc depends: - python >=3.7 license: 
MIT - license_family: MIT purls: - pkg:pypi/pytz?source=hash-mapping - size: 188538 - timestamp: 1706886944988 + size: 186995 + timestamp: 1726055625738 - kind: conda name: pywin32 version: '306' @@ -15883,11 +16105,12 @@ packages: - kind: conda name: pywinpty version: 2.0.13 - build: py312h53d5487_0 + build: py312h275cf98_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h53d5487_0.conda - sha256: 56d95d00a0fe6170e6e0e1da6b0e1201291b8054a6342c0792bc4dd791a39088 - md5: 84bc43e330340c01ce93231c096d4ab1 + url: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda + sha256: a13cbe4c93ba756b36e85a5972b5902f89cc3a6cb09e8b65a542eb2e7426487a + md5: 7e164d22d6403d92b73dcacdeb6a5ff0 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -15899,57 +16122,60 @@ packages: license_family: MIT purls: - pkg:pypi/pywinpty?source=hash-mapping - size: 212261 - timestamp: 1708995486138 + size: 212342 + timestamp: 1724951397416 - kind: conda name: pyyaml version: 6.0.2 - build: py312h41a817b_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h41a817b_0.conda - sha256: 06a139ccc9a1472489ca5df6f7c6f44e2eb9b1c2de1142f5beec3f430ca7ae3c - md5: 1779c9cbd9006415ab7bb9e12747e9d1 + build: py312h4389bb4_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_1.conda + sha256: fa3ede1fa2ed6ea0a51095aeea398f6f0f54af036c4bc525726107cfb49229d5 + md5: afb7809721516919c276b45f847c085f depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 - yaml >=0.2.5,<0.3.0a0 license: MIT license_family: MIT purls: - - pkg:pypi/pyyaml?source=compressed-mapping - size: 205734 - timestamp: 1723018377857 + - pkg:pypi/pyyaml?source=hash-mapping + size: 181227 + timestamp: 1725456516473 - kind: 
conda name: pyyaml version: 6.0.2 - build: py312h4389bb4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_0.conda - sha256: 2413377ce0fd4eee66eaf5450d0200cd9124acfb9fc7932dcdc2f618bc8e840e - md5: a64ca370389c8bfacf848f40654ffc04 + build: py312h66e93f0_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda + sha256: a60705971e958724168f2ebbb8ed4853067f1d3f7059843df3903e3092bbcffa + md5: 549e5930e768548a89c23f595dac5a95 depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - yaml >=0.2.5,<0.3.0a0 license: MIT license_family: MIT purls: - - pkg:pypi/pyyaml?source=compressed-mapping - size: 181385 - timestamp: 1723018911152 + - pkg:pypi/pyyaml?source=hash-mapping + size: 206553 + timestamp: 1725456256213 - kind: conda name: pyyaml version: 6.0.2 - build: py312hbd25219_0 + build: py312hb553811_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hbd25219_0.conda - sha256: dfc405e4c08edd587893ff0300140814838508d92e4ef1f8a1f8f35527108380 - md5: 3d847d381481b9bd802c2735e08f0c43 + url: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hb553811_1.conda + sha256: 455ce40588b35df654cb089d29cc3f0d3c78365924ffdfc6ee93dba80cea5f33 + md5: 66514594817d51c78db7109a23ad322f depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -15958,62 +16184,65 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/pyyaml?source=compressed-mapping - size: 190172 - timestamp: 1723018420621 + - pkg:pypi/pyyaml?source=hash-mapping + size: 189347 + timestamp: 1725456465705 - kind: conda name: pyzmq - version: 26.1.0 - build: py312h7a17523_0 + version: 26.2.0 + build: py312h54d5c6a_2 + build_number: 2 subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.1.0-py312h7a17523_0.conda - sha256: db689576d2239f52256eccd8b7b7ff66b4667b14b85c3cd7bcf22aa82611bd13 - md5: 9486b416e47ee1a7a4051aad39240f02 + url: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda + sha256: 6c412ab7f2ff2f112f53888913a9505518789a9c6d39ba9ad57d26a26f1c1b96 + md5: de7dc71e825ef8745051e1439935a244 depends: - __osx >=10.13 - - libcxx >=16 - - libsodium >=1.0.18,<1.0.19.0a0 + - libcxx >=17 + - libsodium >=1.0.20,<1.0.21.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - zeromq >=4.3.5,<4.4.0a0 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/pyzmq?source=compressed-mapping - size: 363329 - timestamp: 1722971866456 + - pkg:pypi/pyzmq?source=hash-mapping + size: 362401 + timestamp: 1725449326748 - kind: conda name: pyzmq - version: 26.1.0 - build: py312h7ab5c7e_0 + version: 26.2.0 + build: py312hbf22597_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.1.0-py312h7ab5c7e_0.conda - sha256: 506dfa9939e2a36bd52afc586f82fda91d3e718c705738b11842f35f35510953 - md5: 53f323d819ee9bd141667865425cc8d2 + url: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda + sha256: a2431644cdef4111f7120565090114f52897e687e83c991bd76a3baef8de77c4 + md5: 44f46ddfdd01d242d2fff2d69a0d7cba depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libsodium >=1.0.18,<1.0.19.0a0 - - libstdcxx-ng >=12 + - libgcc >=13 + - libsodium >=1.0.20,<1.0.21.0a0 + - libstdcxx >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - zeromq >=4.3.5,<4.4.0a0 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/pyzmq?source=compressed-mapping - size: 378633 - timestamp: 1722971803299 + - pkg:pypi/pyzmq?source=hash-mapping + size: 378667 + timestamp: 1725449078945 - kind: conda name: pyzmq - version: 26.1.0 - build: py312hd7027bb_0 + version: 26.2.0 + build: py312hd7027bb_2 + build_number: 2 subdir: 
win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.1.0-py312hd7027bb_0.conda - sha256: 8710241a86d9f6d55fccf9c4a10cb519e552ff5f55d178c9a6cb18c7c2b4d1b4 - md5: f03b5823588a12062dfb02295805f060 + url: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda + sha256: b148a68de6fc13f7d760936f72a240bf49049ded5a55c3b372581a2f1ea83655 + md5: 4b52a5f41750f313d59704d09120a02f depends: - - libsodium >=1.0.18,<1.0.19.0a0 + - libsodium >=1.0.20,<1.0.21.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - ucrt >=10.0.20348.0 @@ -16023,9 +16252,9 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/pyzmq?source=compressed-mapping - size: 360916 - timestamp: 1722972158192 + - pkg:pypi/pyzmq?source=hash-mapping + size: 360878 + timestamp: 1725449586300 - kind: conda name: qhull version: '2020.2' @@ -16079,12 +16308,12 @@ packages: - kind: conda name: qt6-main version: 6.7.2 - build: hb12f9c5_4 - build_number: 4 + build: hb12f9c5_5 + build_number: 5 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_4.conda - sha256: 619c1ea79ddca804e2eb020c5c58a0d9127203bdd98035c72bbaf947ab9e19bd - md5: 5dd4fddb73e5e4fef38ef54f35c155cd + url: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_5.conda + sha256: 712c5e6fef0b121bd62d941f8e11fff2ac5e1b36b7af570f4465f51e14193104 + md5: 8c662388c2418f293266f5e7f50df7d7 depends: - __glibc >=2.17,<3.0.a0 - alsa-lib >=1.2.12,<1.3.0a0 @@ -16100,12 +16329,14 @@ packages: - libclang13 >=18.1.8 - libcups >=2.3.3,<2.4.0a0 - libdrm >=2.4.122,<2.5.0a0 + - libegl >=1.7.0,<2.0a0 - libgcc-ng >=12 + - libgl >=1.7.0,<2.0a0 - libglib >=2.80.3,<3.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libllvm18 >=18.1.8,<18.2.0a0 - libpng >=1.6.43,<1.7.0a0 - - libpq >=16.3,<17.0a0 + - libpq >=16.4,<17.0a0 - libsqlite >=3.46.0,<4.0a0 - libstdcxx-ng >=12 - libtiff >=4.6.0,<4.7.0a0 @@ -16114,7 +16345,7 @@ packages: - libxkbcommon >=1.7.0,<2.0a0 - libxml2 
>=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - - mysql-libs >=8.3.0,<8.4.0a0 + - mysql-libs >=9.0.1,<9.1.0a0 - openssl >=3.3.1,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - wayland >=1.23.0,<2.0a0 @@ -16128,23 +16359,25 @@ packages: - xorg-libsm >=1.2.4,<2.0a0 - xorg-libx11 >=1.8.9,<2.0a0 - xorg-libxext >=1.3.4,<2.0a0 + - xorg-libxtst >=1.2.5,<2.0a0 + - xorg-libxxf86vm >=1.1.5,<2.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - qt 6.7.2 license: LGPL-3.0-only license_family: LGPL purls: [] - size: 46508789 - timestamp: 1721426751589 + size: 46904534 + timestamp: 1724536870579 - kind: conda name: qt6-main version: 6.7.2 - build: hbb46ec1_4 - build_number: 4 + build: hbb46ec1_5 + build_number: 5 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_4.conda - sha256: 9abbea17737708356919930cad357e63fe1df40106eeb1114a74e523ff620930 - md5: 11c572c84b282f085c0379d6b5a6db19 + url: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_5.conda + sha256: 23d5e8864e9957c00546be554171e3c4415a7e0670870bd361db8e28e0be716e + md5: e14fa5fe2da0bf8cc30d06314ce6ce33 depends: - double-conversion >=3.3.0,<3.4.0a0 - harfbuzz >=9.0.0,<10.0a0 @@ -16169,77 +16402,74 @@ packages: license: LGPL-3.0-only license_family: LGPL purls: [] - size: 88624419 - timestamp: 1721430217503 + size: 85902078 + timestamp: 1724537977958 - kind: conda name: quarto - version: 1.5.55 + version: 1.5.57 build: h57928b3_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.55-h57928b3_0.conda - sha256: 2787e5b88c35ad1b76ec8899526fdaeb3aff9285fb5ee23a1a47c688e41af149 - md5: 410400803f8c0159b73450c52db7c66a + url: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.57-h57928b3_0.conda + sha256: 92a40f8745068fb2d6af4b3bf990a5084913df38862f87fefcbce8e62fd2a314 + md5: 353bf33225a5596289bb616a4c00cc62 depends: - dart-sass - deno >=1.41.0,<1.41.1.0a0 - deno-dom >=0.1.35,<0.1.36.0a0 - esbuild - - pandoc >=3.2.1,<3.2.2.0a0 - - typst 0.11.0.* + - pandoc 3.2 + 
- typst 0.11.0 license: GPL-2.0-or-later - license_family: GPL purls: [] - size: 15733549 - timestamp: 1722533252920 + size: 15860524 + timestamp: 1725910705704 - kind: conda name: quarto - version: 1.5.55 + version: 1.5.57 build: h694c41f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.55-h694c41f_0.conda - sha256: e748018f09ed6d1ba0fd2530856d1b57f199b1ca9e952318be19d9d238def62d - md5: 12f4daf7f1257f253ee68083a2eccfd1 + url: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.57-h694c41f_0.conda + sha256: ee84ab2bc0991b501378d4a1c5a2d5a6e90038a4406e570db2ab0334550db888 + md5: 22f51e78ab9f09e1352bf5b7625225d5 depends: - dart-sass - deno >=1.41.0,<1.41.1.0a0 - deno-dom >=0.1.35,<0.1.36.0a0 - esbuild - - pandoc >=3.2.1,<3.2.2.0a0 - - typst 0.11.0.* + - pandoc 3.2 + - typst 0.11.0 license: GPL-2.0-or-later - license_family: GPL purls: [] - size: 15919485 - timestamp: 1722533041532 + size: 15729834 + timestamp: 1725910469278 - kind: conda name: quarto - version: 1.5.55 + version: 1.5.57 build: ha770c72_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.55-ha770c72_0.conda - sha256: b5eccf81660520c3dcf24c0cdee28cbc78ccafc3162a24412cff388f3b85ec1d - md5: 9ea2162c896bb43b500012f2a9d9f2d9 + url: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.57-ha770c72_0.conda + sha256: b8bccc243d6ed27d2511662601c2f3e8fa1f371fd71d40ea3ffcb15dc3192676 + md5: dd6838c663e9b5a1b18048a164ccb90b depends: - dart-sass - deno >=1.41.0,<1.41.1.0a0 - deno-dom >=0.1.35,<0.1.36.0a0 - esbuild - - pandoc >=3.2.1,<3.2.2.0a0 - - typst 0.11.0.* + - pandoc 3.2 + - typst 0.11.0 license: GPL-2.0-or-later - license_family: GPL purls: [] - size: 15720754 - timestamp: 1722532945632 + size: 16059182 + timestamp: 1725910489963 - kind: conda name: quartodoc - version: 0.7.5 + version: 0.7.6 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.5-pyhd8ed1ab_0.conda - 
sha256: b274b1715455b4cc629fff973fa74cf7c712072d25c24fc1c01396778d3be567 - md5: e792d8c21d24dbb2b880c07430328536 + url: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + sha256: aa8b66a33d88ad834956ab4ec5993f0858ecbd243156cc45f6b1ab7443378617 + md5: 02da3ef68978e1d6d61ded2338b2ee8b depends: - click - griffe >=0.33 @@ -16258,17 +16488,16 @@ packages: license_family: MIT purls: - pkg:pypi/quartodoc?source=hash-mapping - size: 65878 - timestamp: 1718900565226 + size: 65713 + timestamp: 1724510613002 - kind: conda name: rasterio - version: 1.3.10 - build: py312h1c98354_5 - build_number: 5 + version: 1.3.11 + build: py312h4104ae2_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.10-py312h1c98354_5.conda - sha256: dc8cc73a672db049d0dcff04edeb290214a470ce8a7b7e541642fe09ce3c4240 - md5: 18896dd74aad55f12b199e415cf98334 + url: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.11-py312h4104ae2_0.conda + sha256: 4cdc05af7bb4841b94cadb2433f32242180d80401a324917c7915f0281f2f446 + md5: 0e9793e312dae8b4e34b4d54db6ae5c7 depends: - __osx >=10.13 - affine @@ -16277,9 +16506,9 @@ packages: - click >=4 - click-plugins - cligj >=0.5 - - libcxx >=16 - - libgdal >=3.9.1,<3.10.0a0 - - libgdal-core >=3.9.1,<3.10.0a0 + - libcxx >=17 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 - numpy >=1.19,<3 - proj >=9.4.1,<9.5.0a0 - python >=3.12,<3.13.0a0 @@ -16289,75 +16518,73 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/rasterio?source=compressed-mapping - size: 7215413 - timestamp: 1722410316411 + - pkg:pypi/rasterio?source=hash-mapping + size: 6954057 + timestamp: 1725459093401 - kind: conda name: rasterio - version: 1.3.10 - build: py312he4a2ebf_5 - build_number: 5 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.10-py312he4a2ebf_5.conda - sha256: 491fde21a4feef76cc6925eb43c2fa992979cd89790fe96911138abc349ee243 - md5: 
823a1e4e94e2c93700e278bbc9046ad0 + version: 1.3.11 + build: py312hd177ed6_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_0.conda + sha256: 210bddb89d3063fe6b63f9d0d9a0b48c9776e62120e0db82527d1255f6944f12 + md5: 996cf1c27ebf9466c00fc28b0080e9a1 depends: + - __glibc >=2.17,<3.0.a0 - affine - attrs - certifi - click >=4 - click-plugins - cligj >=0.5 - - libgdal >=3.9.1,<3.10.0a0 - - libgdal-core >=3.9.1,<3.10.0a0 + - libgcc >=13 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 + - libstdcxx >=13 - numpy >=1.19,<3 - proj >=9.4.1,<9.5.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - setuptools >=0.9.8 - snuggs >=1.4.1 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/rasterio?source=compressed-mapping - size: 7372491 - timestamp: 1722410788505 + - pkg:pypi/rasterio?source=hash-mapping + size: 7247649 + timestamp: 1725458951562 - kind: conda name: rasterio - version: 1.3.10 - build: py312hff7f44f_5 - build_number: 5 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.10-py312hff7f44f_5.conda - sha256: e29a4f69492b515611f0b95785945f4e45a422aaccb58a0dc397b0923f36ff97 - md5: 8e7d16b0cefed27fb9367e9a47b7128d + version: 1.3.11 + build: py312he4a2ebf_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.11-py312he4a2ebf_0.conda + sha256: 394a53ccf0ed3769992b8e0fe2c83b53b1da8a9a5efb4e60abd6b67f135443a7 + md5: b172bdda0a0f039a35d621e56dd2cd36 depends: - - __glibc >=2.17,<3.0.a0 - affine - attrs - certifi - click >=4 - click-plugins - cligj >=0.5 - - libgcc-ng >=12 - - libgdal >=3.9.1,<3.10.0a0 - - libgdal-core >=3.9.1,<3.10.0a0 - - libstdcxx-ng >=12 + - libgdal >=3.9.2,<3.10.0a0 + - libgdal-core >=3.9.2,<3.10.0a0 - numpy >=1.19,<3 - proj >=9.4.1,<9.5.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - setuptools >=0.9.8 - snuggs >=1.4.1 + 
- ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/rasterio?source=compressed-mapping - size: 7598387 - timestamp: 1722410413212 + - pkg:pypi/rasterio?source=hash-mapping + size: 7139285 + timestamp: 1725459641246 - kind: conda name: rasterstats version: 0.19.0 @@ -16545,7 +16772,7 @@ packages: name: ribasim version: 2024.10.0 path: ../Ribasim/python/ribasim - sha256: dabdd02a24d0b7d6a745c1bcdd800f8afdeb527b8b294ded4eaaf7205939a9cd + sha256: c48692687129085ad19256cbf54c8df9853f6259b31997cc4a282fde86072751 requires_dist: - geopandas - matplotlib @@ -16615,24 +16842,24 @@ packages: editable: true - kind: conda name: rich - version: 13.7.1 + version: 13.8.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/rich-13.7.1-pyhd8ed1ab_0.conda - sha256: 2b26d58aa59e46f933c3126367348651b0dab6e0bf88014e857415bb184a4667 - md5: ba445bf767ae6f0d959ff2b40c20912b + url: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda + sha256: eb7d88222ec1a05c1b333aab5ca9bf486f2005f17c0d86a6b7653da47c6c143b + md5: 748f1807fa7fda73651795c5617b9557 depends: - markdown-it-py >=2.2.0 - pygments >=2.13.0,<3.0.0 - - python >=3.7.0 + - python >=3.7 - typing_extensions >=4.0.0,<5.0.0 license: MIT license_family: MIT purls: - pkg:pypi/rich?source=hash-mapping - size: 184347 - timestamp: 1709150578093 + size: 185358 + timestamp: 1726066139954 - kind: conda name: rioxarray version: 0.17.0 @@ -16659,72 +16886,75 @@ packages: - kind: conda name: rpds-py version: 0.20.0 - build: py312h2615798_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_0.conda - sha256: b88622d69e0a1da6c9fb83a75c9e86c3efb41644fe96d24cd0c1f7d699484b52 - md5: a8ec26c7605929b408fa2c3ddd613fd2 + build: py312h12e396e_1 + build_number: 1 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda + sha256: c1b876198b565af674e3cbc66d872791e09d6b10ca2c663b1cec40517f836509 + md5: 9ae193ac9c1ead5024d5a4ee0024e9a6 depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + constrains: + - __glibc >=2.17 license: MIT license_family: MIT purls: - - pkg:pypi/rpds-py?source=compressed-mapping - size: 209775 - timestamp: 1723040158005 + - pkg:pypi/rpds-py?source=hash-mapping + size: 334627 + timestamp: 1725327239912 - kind: conda name: rpds-py version: 0.20.0 - build: py312ha47ea1c_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312ha47ea1c_0.conda - sha256: 8c6923581b1db573c21a0abbfe0a9387f1e88156dcdf04c7b3f6001ca7b2ba1e - md5: c3ee2963d7cfd5daaec1006793c99507 + build: py312h2615798_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_1.conda + sha256: c33ac1e86925563c8b119a059111fe13196d8786ec1dea144c35737e620db283 + md5: 3346e30a5df4a407f0426646dc35ccd6 depends: - - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - constrains: - - __osx >=10.13 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - - pkg:pypi/rpds-py?source=compressed-mapping - size: 298956 - timestamp: 1723039412050 + - pkg:pypi/rpds-py?source=hash-mapping + size: 209063 + timestamp: 1725327883530 - kind: conda name: rpds-py version: 0.20.0 - build: py312hf008fa9_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312hf008fa9_0.conda - sha256: c1c797db876a3a642fd1293be3ce5428f2699cbc1e1f2f9152501e656b897c24 - md5: 0735929f1a2a89c62b91d07ef5a76645 + build: py312h669792a_1 + build_number: 1 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312h669792a_1.conda + sha256: 096a81dfd970f616cf882f75d7f953e0529af956c8a929ac0f30033dec27f92e + md5: b5c33da663aab65eec94188effd0ebee depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 constrains: - - __glibc >=2.17 + - __osx >=10.13 license: MIT license_family: MIT purls: - - pkg:pypi/rpds-py?source=compressed-mapping - size: 336290 - timestamp: 1723039277393 + - pkg:pypi/rpds-py?source=hash-mapping + size: 299893 + timestamp: 1725327367863 - kind: conda name: ruff - version: 0.6.1 - build: py312h7a6832a_0 + version: 0.6.4 + build: py312h881003e_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.1-py312h7a6832a_0.conda - sha256: e5c954b67cf00b89d1b64da652d2dae774f849ae7961a52d8c226fe9f39336fd - md5: 692adc6fcbcf83e0a23dfc5783d141cc + url: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.4-py312h881003e_0.conda + sha256: 82c8fc3fecece3fa6db6d88be239ba62b407c09071df7705664cbfaf7550b388 + md5: 302b3f9a3d88d6d535da9d8fe663eb7d depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -16734,102 +16964,101 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/ruff?source=compressed-mapping - size: 6338756 - timestamp: 1723845582188 + - pkg:pypi/ruff?source=hash-mapping + size: 6455872 + timestamp: 1725619373056 - kind: conda name: ruff - version: 0.6.1 - build: py312h8b25c6c_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.1-py312h8b25c6c_0.conda - sha256: 4150163a68d709bb43b825661a0351ee6ef434c95ca4e50c6d722e96187fa434 - md5: cb868f88436af1db6df8b5de992360df + version: 0.6.4 + build: py312hd18ad41_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.4-py312hd18ad41_0.conda + sha256: 64e89828218eb52ba71fee66d74fbc19817ca0f914cb6e9ad3c82423e9f6d40e + md5: bbb52fcabbc926d506bed70d70e44776 depends: - - __osx >=10.13 - 
- libcxx >=16 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - constrains: - - __osx >=10.13 license: MIT license_family: MIT purls: - - pkg:pypi/ruff?source=compressed-mapping - size: 6192936 - timestamp: 1723844613061 + - pkg:pypi/ruff?source=hash-mapping + size: 6554879 + timestamp: 1725618160547 - kind: conda name: ruff - version: 0.6.1 - build: py312hbe4c86d_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.1-py312hbe4c86d_0.conda - sha256: 56681b77861dfb08f656248c5707c23ed9c3fb7758531cabb563f008a7c4f4a6 - md5: 15153af670ac4d72f188aecdb4bc6119 + version: 0.6.4 + build: py312he6c0bb9_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.4-py312he6c0bb9_0.conda + sha256: 386e02becf61164e38b896ae9e3782d69aa34e6ef63013afd88284811e1674cd + md5: ff1f5ec398a38d04b42d0d62a962f0b9 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - __osx >=10.13 + - libcxx >=17 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 constrains: - - __glibc >=2.17 + - __osx >=10.12 license: MIT license_family: MIT purls: - - pkg:pypi/ruff?source=compressed-mapping - size: 7216969 - timestamp: 1723844217866 + - pkg:pypi/ruff?source=hash-mapping + size: 6298299 + timestamp: 1725618483850 - kind: conda name: s2n - version: 1.5.0 - build: h3400bea_0 + version: 1.5.2 + build: h7b32b05_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.0-h3400bea_0.conda - sha256: 594878a49b1c4d657795f80ffbe87f15a16cd2162f28383a5b794d301d6cbc65 - md5: 5f17883266c5312a1fc73583f28ebae5 + url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.2-h7b32b05_0.conda + sha256: a08afbf88cf0d298da69118c12432ab76d4c2bc2972b2f9b87de95b2530cfae8 + md5: daf6322364fe6fc46c515d4d3d0051c2 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - openssl >=3.3.1,<4.0a0 + - libgcc >=13 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 
license_family: Apache purls: [] - size: 353483 - timestamp: 1723253710366 + size: 351882 + timestamp: 1725682764682 - kind: conda name: scikit-learn - version: 1.5.1 - build: py312h775a589_0 + version: 1.5.2 + build: py312h7a48858_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.1-py312h775a589_0.conda - sha256: cf9735937209d01febf1f912559e28dc3bb753906460e5b85dc24f0d57a78d96 - md5: bd8c79ccb9498336cbb174cf0151024a + url: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda + sha256: 3118b687c7cfb4484cc5c65591b611d834e3ea2424cb75e1e0b0980d0de72afc + md5: 6b5f4c68483bd0c22bca9094dafc606b depends: - __glibc >=2.17,<3.0.a0 - _openmp_mutex >=4.5 - joblib >=1.2.0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc >=13 + - libstdcxx >=13 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - scipy - threadpoolctl >=3.1.0 license: BSD-3-Clause - license_family: BSD purls: - pkg:pypi/scikit-learn?source=hash-mapping - size: 10384469 - timestamp: 1719998679827 + size: 10393222 + timestamp: 1726083382159 - kind: conda name: scikit-learn - version: 1.5.1 - build: py312h816cc57_0 + version: 1.5.2 + build: py312h816cc57_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.1-py312h816cc57_0.conda - sha256: 00ab427eaebdc17816655ec7d116de9511a8ad04020fc47e0b4bc5dcfc46bbbb - md5: fa83d73ec4a87352b6bbfcdfde5aeab2 + url: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.2-py312h816cc57_1.conda + sha256: 7c64942d20339e965c22e27ceca72e0f0ff7d32962d9621903c3812714835f4f + md5: e2b5c3288bd3f8e89a46b98f8d9e8768 depends: - joblib >=1.2.0 - numpy >=1.19,<3 @@ -16841,49 +17070,47 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: BSD-3-Clause - license_family: BSD purls: - pkg:pypi/scikit-learn?source=hash-mapping - size: 9225862 - timestamp: 1719999149012 + size: 9215977 + timestamp: 
1726083836746 - kind: conda name: scikit-learn - version: 1.5.1 - build: py312hc214ba5_0 + version: 1.5.2 + build: py312h9d777eb_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.1-py312hc214ba5_0.conda - sha256: 62a33e1266c9e2e99e5bb68127160e04a592b62e553faa4f6ad2df264b9654f0 - md5: 32625e0f29884a4704070c07a25edf94 + url: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.2-py312h9d777eb_1.conda + sha256: f02c5ccc044afd85ce8bfb4504526ad2d65b24d11541145d5423a5f3abc7e19c + md5: 258180f3d58e64d6a0be0abf2b125944 depends: - __osx >=10.13 - joblib >=1.2.0 - - libcxx >=16 - - llvm-openmp >=16.0.6 + - libcxx >=17 + - llvm-openmp >=17.0.6 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - scipy - threadpoolctl >=3.1.0 license: BSD-3-Clause - license_family: BSD purls: - pkg:pypi/scikit-learn?source=hash-mapping - size: 9488534 - timestamp: 1719998895551 + size: 9479906 + timestamp: 1726083214500 - kind: conda name: scipy - version: 1.14.0 - build: py312h1f4e10d_2 - build_number: 2 + version: 1.14.1 + build: py312h1f4e10d_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.0-py312h1f4e10d_2.conda - sha256: beb4737c20a8d6bf864d937761f323569402baa133b121bd35d7f81c27ca4dc6 - md5: bcdfde2d67d09e76e27ae1f634772c6c + url: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda + sha256: 8f70ded1b7b469d61f6f7a580c541538a0275e05a0ca2def60cb95555d06e7e3 + md5: 075ca2339855d696007b35110b83d958 depends: - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 - liblapack >=3.9.0,<4.0a0 - - numpy >=1.23.5,<2.3 + - numpy <2.3 - numpy >=1.19,<3 - numpy >=1.23.5 - python >=3.12,<3.13.0a0 @@ -16894,28 +17121,27 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/scipy?source=compressed-mapping - size: 15915111 - timestamp: 1723857062798 + - pkg:pypi/scipy?source=hash-mapping + size: 16013280 + timestamp: 1724329197087 - kind: conda 
name: scipy - version: 1.14.0 - build: py312h499d17b_2 - build_number: 2 + version: 1.14.1 + build: py312h7d485d2_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.0-py312h499d17b_2.conda - sha256: a84bd33593abc9b6acadcd31410f9a028031f00c2a8db33255f2358570673346 - md5: fbb459d6590fad7bd00aeb1665bb67d1 + url: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda + sha256: 79903e307183e08b19c7ef607672fd304ed4968b2a7530904147aa79536e70d1 + md5: 7418a22e73008356d9aba99d93dfeeee depends: - __glibc >=2.17,<3.0.a0 - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 - - libgcc-ng >=12 + - libgcc-ng >=13 - libgfortran-ng - - libgfortran5 >=12.4.0 + - libgfortran5 >=13.3.0 - liblapack >=3.9.0,<4.0a0 - - libstdcxx-ng >=12 - - numpy >=1.23.5,<2.3 + - libstdcxx-ng >=13 + - numpy <2.3 - numpy >=1.19,<3 - numpy >=1.23.5 - python >=3.12,<3.13.0a0 @@ -16923,28 +17149,26 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/scipy?source=compressed-mapping - size: 17610049 - timestamp: 1723856153431 + - pkg:pypi/scipy?source=hash-mapping + size: 17700161 + timestamp: 1724328333870 - kind: conda name: scipy - version: 1.14.0 - build: py312hb9702fa_2 - build_number: 2 + version: 1.14.1 + build: py312he82a568_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.0-py312hb9702fa_2.conda - sha256: 5b7c993667836317f39d55046abff58eee468939675bda3f59872e2e64e2d819 - md5: 610311c8f21dbbf294157f03922e9ca8 + url: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda + sha256: 21339aad0646f5c841ded61a2dae6fa46cef86d691098fd6160c5311e0a86454 + md5: dd3c55da62964fcadf27771e1928e67f depends: - __osx >=10.13 - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 - - libcxx >=16 + - libcxx >=17 - libgfortran 5.* - - libgfortran5 >=12.3.0 - libgfortran5 >=13.2.0 - liblapack >=3.9.0,<4.0a0 - - numpy >=1.23.5,<2.3 + - numpy <2.3 - numpy >=1.19,<3 - numpy >=1.23.5 - python 
>=3.12,<3.13.0a0 @@ -16952,9 +17176,9 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/scipy?source=compressed-mapping - size: 16131947 - timestamp: 1723855841034 + - pkg:pypi/scipy?source=hash-mapping + size: 16322022 + timestamp: 1724328432301 - kind: conda name: send2trash version: 1.8.3 @@ -17013,29 +17237,30 @@ packages: timestamp: 1712585816346 - kind: conda name: setuptools - version: 72.1.0 + version: 73.0.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/setuptools-72.1.0-pyhd8ed1ab_0.conda - sha256: d239e7f1b1a5617eeadda4e91183592f5a15219e97e16bc721d7b0597ee89a80 - md5: e06d4c26df4f958a8d38696f2c344d15 + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda + sha256: c9f5e110e3fe5a7c4cd5b9da445c05a1fae000b43ab3a97cb6a501f4267515fc + md5: f0b618d7673d1b2464f600b34d912f6f depends: - python >=3.8 license: MIT license_family: MIT purls: - pkg:pypi/setuptools?source=compressed-mapping - size: 1462612 - timestamp: 1722586785703 + size: 1460460 + timestamp: 1725348602179 - kind: conda name: shapely - version: 2.0.5 - build: py312h3a88d77_0 + version: 2.0.6 + build: py312h3a88d77_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.5-py312h3a88d77_0.conda - sha256: 723a8f400918b63626cd49ab3364a661fe07c70c3e022431fc610e196a14b49f - md5: b4a76f36780fa55cd2104f2970ff6bed + url: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h3a88d77_1.conda + sha256: 1af8c26dc5507f60e4459975d228e7e329119ec9311ebb88c98603838a9871d2 + md5: 4f5c4f3160397a63c7b15c81f6c0e9b3 depends: - geos >=3.12.2,<3.12.3.0a0 - numpy >=1.19,<3 @@ -17048,19 +17273,21 @@ packages: license_family: BSD purls: - pkg:pypi/shapely?source=hash-mapping - size: 535191 - timestamp: 1720886719216 + size: 534109 + timestamp: 1725394700590 - kind: conda name: shapely - version: 2.0.5 - build: py312h594820c_0 - subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.5-py312h594820c_0.conda - sha256: 15a9a4bf93032bc5714c9c98fc7028883e6ffcca69d91fe1dc3e619119bc5045 - md5: b7c1b53fdae47febbde2e25e207c7389 + version: 2.0.6 + build: py312h6cab151_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda + sha256: b7818c7264926401b78b0afc9a7d2c98ff0fc0ed637ad9e5c126da38a40382f7 + md5: 5be02e05e1adaa42826cc6800ce399bc depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - geos >=3.12.2,<3.12.3.0a0 + - libgcc >=13 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -17068,20 +17295,20 @@ packages: license_family: BSD purls: - pkg:pypi/shapely?source=hash-mapping - size: 538432 - timestamp: 1720886351691 + size: 571255 + timestamp: 1725394110104 - kind: conda name: shapely - version: 2.0.5 - build: py312h8413631_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.5-py312h8413631_0.conda - sha256: 7ed7ec680dce240f74aca2ddfc6b69487841645237683612e082084fc880a9a9 - md5: 3e67354b24c7ee057ddee367f310ad3e + version: 2.0.6 + build: py312h8047845_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h8047845_1.conda + sha256: 8dbacc971bc78375537f4fc8879cb8d957f08d98622bf6262eaf3f8ea9cb31b3 + md5: 2f4d580436a2f2162275c9994dd706cb depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - geos >=3.12.2,<3.12.3.0a0 - - libgcc-ng >=12 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -17089,55 +17316,58 @@ packages: license_family: BSD purls: - pkg:pypi/shapely?source=hash-mapping - size: 568517 - timestamp: 1720886333536 + size: 538593 + timestamp: 1725394128298 - kind: conda name: simplejson version: 3.19.3 - build: py312h41a817b_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h41a817b_0.conda - sha256: 
0d0de00e5570c37818836d1eb15b44cca47dffbad34c4100add83d796443b276 - md5: 5ee99b39f0c3b74a2c6da28752a9b761 + build: py312h4389bb4_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_1.conda + sha256: 6e6b495693ee61cb97a75661c85fdfc81e49f83a1a06be376f1287ce199e48ec + md5: 70b86ae596a0e6eac3a1a0183053c093 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - - pkg:pypi/simplejson?source=compressed-mapping - size: 130202 - timestamp: 1723702839180 + - pkg:pypi/simplejson?source=hash-mapping + size: 129004 + timestamp: 1724955521227 - kind: conda name: simplejson version: 3.19.3 - build: py312h4389bb4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_0.conda - sha256: 97a519abcb3976b1ecdd135c785b9012bc1e432e098ae5a781293c6a93a4ca06 - md5: a2da9923ed7f140bdc687a59b6d410ad + build: py312h66e93f0_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda + sha256: 811ed3d952b3ec1ab7d7ce3e68c6dd06f19dc591638d859e7900260968bd1c5f + md5: c8d1a609d5f3358d715c2273011d9f4d depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - - pkg:pypi/simplejson?source=compressed-mapping - size: 129197 - timestamp: 1723703355462 + - pkg:pypi/simplejson?source=hash-mapping + size: 130673 + timestamp: 1724955185225 - kind: conda name: simplejson version: 3.19.3 - build: py312hbd25219_0 + build: py312hb553811_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hbd25219_0.conda - sha256: 
b6b12ee88ed395f268efb7671028ab27851bd5684810c77d40e91ae996430ca7 - md5: d6843b9f409068e72ec00131f6262692 + url: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hb553811_1.conda + sha256: 7ae04555d4d1ca64eedac47c4ee753360e2121d36640e439c3a4cf019c526ca1 + md5: baa3415736e3626c23f05dedada48a4b depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -17145,9 +17375,9 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/simplejson?source=compressed-mapping - size: 128705 - timestamp: 1723702909479 + - pkg:pypi/simplejson?source=hash-mapping + size: 128382 + timestamp: 1724955227599 - kind: conda name: six version: 1.16.0 @@ -17248,7 +17478,7 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/snuggs?source=compressed-mapping + - pkg:pypi/snuggs?source=hash-mapping size: 11131 timestamp: 1722610712753 - kind: conda @@ -17364,57 +17594,58 @@ packages: timestamp: 1716387030819 - kind: conda name: sqlite - version: 3.46.0 + version: 3.46.1 build: h2466b09_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.0-h2466b09_0.conda - sha256: 204edea00bb813d1e3da31dcd8caf1cb355ded08be3065ca53dea066bf75b827 - md5: f60e557d64002fe9955b929226adf81d + url: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.1-h2466b09_0.conda + sha256: fdee2e0c16ece695fde231d80242121b5ff610a4f66164f931e2a7622815c3ae + md5: 19c50225f5fbbb15d80063a68e52c8bb depends: - - libsqlite 3.46.0 h2466b09_0 + - libsqlite 3.46.1 h2466b09_0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Unlicense purls: [] - size: 885699 - timestamp: 1718051144579 + size: 886067 + timestamp: 1725354209514 - kind: conda name: sqlite - version: 3.46.0 - build: h28673e1_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/sqlite-3.46.0-h28673e1_0.conda - sha256: 7d868d34348615450c43cb4737b44987a0e45fdf4759502b323494dc8c931409 - md5: b76e50276ebb3131cb84aac8123ca75d + version: 3.46.1 + build: h9eae976_0 + 
subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.1-h9eae976_0.conda + sha256: 8c6245f988a2e1f4eef8456726b9cc46f2462448e61daa4bad2f9e4ca601598a + md5: b2b3e737da0ae347e16ef1970a5d3f14 depends: - - __osx >=10.13 - - libsqlite 3.46.0 h1b8f9f3_0 - - libzlib >=1.2.13,<2.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libsqlite 3.46.1 hadc24fc_0 + - libzlib >=1.3.1,<2.0a0 - ncurses >=6.5,<7.0a0 - readline >=8.2,<9.0a0 license: Unlicense purls: [] - size: 912413 - timestamp: 1718050767696 + size: 859188 + timestamp: 1725353670478 - kind: conda name: sqlite - version: 3.46.0 - build: h6d4b2fc_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.0-h6d4b2fc_0.conda - sha256: e849d576e52bf3e6fc5786f89b7d76978f2e2438587826c95570324cb572e52b - md5: 77ea8dff5cf8550cc8f5629a6af56323 + version: 3.46.1 + build: he26b093_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/sqlite-3.46.1-he26b093_0.conda + sha256: 668dcc8c38aabf42de440f682de4afe11f390b1dc5b49e09b34501bbf19571c8 + md5: 56a8cc349cf8e2310ee0e52f90247dab depends: - - libgcc-ng >=12 - - libsqlite 3.46.0 hde9e2c9_0 - - libzlib >=1.2.13,<2.0a0 + - __osx >=10.13 + - libsqlite 3.46.1 h4b8f8c9_0 + - libzlib >=1.3.1,<2.0a0 - ncurses >=6.5,<7.0a0 - readline >=8.2,<9.0a0 license: Unlicense purls: [] - size: 860352 - timestamp: 1718050658212 + size: 912164 + timestamp: 1725353686354 - kind: conda name: stack_data version: 0.6.2 @@ -17455,13 +17686,12 @@ packages: timestamp: 1665138565317 - kind: conda name: tbb - version: 2021.12.0 - build: hc790b64_3 - build_number: 3 + version: 2021.13.0 + build: hc790b64_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.12.0-hc790b64_3.conda - sha256: 721a88d702e31efd9437d387774ef9157846743e66648f5f863b29ae322e8479 - md5: a16e2a639e87c554abee5192ce6ee308 + url: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-hc790b64_0.conda + sha256: 
990dbe4fb42f14700c22bd434d8312607bf8d0bd9f922b054e51fda14c41994c + md5: 28496a1e6af43c63927da4f80260348d depends: - libhwloc >=2.11.1,<2.11.2.0a0 - ucrt >=10.0.20348.0 @@ -17470,8 +17700,8 @@ packages: license: Apache-2.0 license_family: APACHE purls: [] - size: 161213 - timestamp: 1720768916898 + size: 151494 + timestamp: 1725532984828 - kind: conda name: tblib version: 3.0.0 @@ -17568,16 +17798,15 @@ packages: timestamp: 1714400228771 - kind: conda name: tiledb - version: 2.25.0 - build: h213c483_7 - build_number: 7 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.25.0-h213c483_7.conda - sha256: 3d60651162fc1bd178791ad8b877e5dbadfdb34c6786cc5308a83e375959f639 - md5: 9d8f1988a2d0420abf75e06497667594 + version: 2.26.0 + build: h313d0e2_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.0-h313d0e2_0.conda + sha256: 56f5f42acb0c46f8a0871c8f092850fc58fb8a542adaeb9f3f412ffccacfecde + md5: 7034c5fe1336d6f1c86299ce8e545de0 depends: - - __glibc >=2.17,<3.0.a0 - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - __osx >=10.13 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - azure-identity-cpp >=1.8.0,<1.8.1.0a0 @@ -17588,32 +17817,31 @@ packages: - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - libcurl >=8.9.1,<9.0a0 - - libgcc-ng >=12 + - libcxx >=17 - libgoogle-cloud >=2.28.0,<2.29.0a0 - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 - - libstdcxx-ng >=12 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - spdlog >=1.14.1,<1.15.0a0 - zstd >=1.5.6,<1.6.0a0 license: MIT license_family: MIT purls: [] - size: 4366120 - timestamp: 1723475383496 + size: 3960161 + timestamp: 1726059270643 - kind: conda name: tiledb - version: 2.25.0 - build: h3c7d8a4_7 - build_number: 7 - subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/tiledb-2.25.0-h3c7d8a4_7.conda - sha256: c919786f19f11c7ab37605255968dd400ad311bba26f8c40011ecd0fbfcde281 - md5: df07dcb982b6e5d8a0244afc40df9542 + version: 2.26.0 + build: h86fa3b2_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda + sha256: 3e92cec15daed5e03d7fc676a021500fc92ac80716495504537d6e4bdb80138f + md5: 061175d9d4c046a1cf8bffe95a359fab depends: - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - __glibc >=2.17,<3.0.a0 + - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - azure-identity-cpp >=1.8.0,<1.8.1.0a0 @@ -17623,36 +17851,32 @@ packages: - fmt >=11.0.2,<12.0a0 - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 - - libcrc32c >=1.1.2,<1.2.0a0 - libcurl >=8.9.1,<9.0a0 + - libgcc >=13 - libgoogle-cloud >=2.28.0,<2.29.0a0 - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 + - libstdcxx >=13 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - spdlog >=1.14.1,<1.15.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 license: MIT license_family: MIT purls: [] - size: 3107503 - timestamp: 1723475894671 + size: 4537477 + timestamp: 1726059097900 - kind: conda name: tiledb - version: 2.25.0 - build: h6b8956e_7 - build_number: 7 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.25.0-h6b8956e_7.conda - sha256: 61602664840a7aa8d457ce3aa26c78bb3f11759e8ba65c90e6c8eb590830f662 - md5: 80b5d1a748253dcc580c8754f7b5d580 + version: 2.26.0 + build: h98a567f_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.0-h98a567f_0.conda + sha256: 823d6d5c172cd90b105553d5dd93e07e0860c8e5751deb3cd076b684366797d7 + md5: 451f161732757b5124fc3a320401c587 depends: - - __osx >=10.13 - - aws-crt-cpp >=0.27.5,<0.27.6.0a0 + - aws-crt-cpp 
>=0.28.2,<0.28.3.0a0 - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - azure-identity-cpp >=1.8.0,<1.8.1.0a0 @@ -17662,21 +17886,24 @@ packages: - fmt >=11.0.2,<12.0a0 - libabseil * cxx17* - libabseil >=20240116.2,<20240117.0a0 + - libcrc32c >=1.1.2,<1.2.0a0 - libcurl >=8.9.1,<9.0a0 - - libcxx >=16 - libgoogle-cloud >=2.28.0,<2.29.0a0 - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - spdlog >=1.14.1,<1.15.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 license: MIT license_family: MIT purls: [] - size: 3895254 - timestamp: 1723475275540 + size: 3093646 + timestamp: 1726059615242 - kind: conda name: tinycss2 version: 1.3.0 @@ -17817,11 +18044,12 @@ packages: - kind: conda name: tornado version: 6.4.1 - build: py312h4389bb4_0 + build: py312h4389bb4_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_0.conda - sha256: 1db4650b15e902828ecc67754eb287971879401ce35437f3a8c3c3da2158af2c - md5: 00a82356b77563593acad8b86de9c5c7 + url: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_1.conda + sha256: 79a4155e4700aa188d6de36ed65b2923527864ad775bb156ed0a4067619e8ee0 + md5: e278437965b2420d567ba11b579668bc depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -17832,34 +18060,37 @@ packages: license_family: Apache purls: - pkg:pypi/tornado?source=hash-mapping - size: 844267 - timestamp: 1717723122629 + size: 841567 + timestamp: 1724956763418 - kind: conda name: tornado version: 6.4.1 - build: py312h9a8786e_0 + build: py312h66e93f0_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h9a8786e_0.conda - sha256: fcf92fde5bac323921d97f8f2e66ee134ea01094f14d4e99c56f98187241c638 - md5: fd9c83fde763b494f07acee1404c280e 
+ url: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda + sha256: c0c9cc7834e8f43702956afaa5af7b0639c4835c285108a43e6b91687ce53ab8 + md5: af648b62462794649066366af4ecd5b0 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: Apache-2.0 license_family: Apache purls: - pkg:pypi/tornado?source=hash-mapping - size: 839315 - timestamp: 1717723013620 + size: 837665 + timestamp: 1724956252424 - kind: conda name: tornado version: 6.4.1 - build: py312hbd25219_0 + build: py312hb553811_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hbd25219_0.conda - sha256: efba7cd7d5c311f57fd1a658c0f8ae65f9c5f3c9c41111a689dcad45407944c8 - md5: 5a40db69b327c71511248f8186965bd3 + url: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hb553811_1.conda + sha256: 67711e308059fd4fd9ce2389b155ffcc52723d202b78cdfa01e7d6a3d42725b5 + md5: 479bb06cef210f968f20866277acd8b9 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -17868,8 +18099,8 @@ packages: license_family: Apache purls: - pkg:pypi/tornado?source=hash-mapping - size: 842608 - timestamp: 1717722844100 + size: 841028 + timestamp: 1724956347530 - kind: pypi name: tqdm version: 4.66.5 @@ -17926,20 +18157,20 @@ packages: timestamp: 1721540525136 - kind: conda name: types-python-dateutil - version: 2.9.0.20240316 + version: 2.9.0.20240906 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda - sha256: 6630bbc43dfb72339fadafc521db56c9d17af72bfce459af195eecb01163de20 - md5: 7831efa91d57475373ee52fb92e8d137 + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda + sha256: 737fecb4b6f85a6a85f3fff6cdf5e90c5922b468e036b98f6c1559780cb79664 + md5: 07c483202a209cd23594b62b3451045e depends: - python >=3.6 
license: Apache-2.0 AND MIT purls: - pkg:pypi/types-python-dateutil?source=hash-mapping - size: 21769 - timestamp: 1710590028155 + size: 21789 + timestamp: 1725623878468 - kind: conda name: types-pytz version: 2024.1.0.20240417 @@ -17958,22 +18189,21 @@ packages: timestamp: 1713337633292 - kind: conda name: types-requests - version: 2.32.0.20240712 - build: pyhd8ed1ab_1 - build_number: 1 + version: 2.32.0.20240907 + build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240712-pyhd8ed1ab_1.conda - sha256: 89c5c55ac0bdf1772ba2dbe890e803c588d5b4bf4e624c34f2e15c8fe5c17693 - md5: a0493fcf860b2ca37d1c1d0f2952d63b + url: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda + sha256: fd7952c3b23cab240aeb8ba49d994d69de2096daa48633e3894c36e9894dc66a + md5: 9f907bdcfc41daad16bee14d959b18aa depends: - python >=3.8 - urllib3 >=2 license: Apache-2.0 AND MIT purls: - pkg:pypi/types-requests?source=hash-mapping - size: 26286 - timestamp: 1721398305880 + size: 26305 + timestamp: 1725694580160 - kind: conda name: typing-extensions version: 4.12.2 @@ -18093,46 +18323,49 @@ packages: timestamp: 1710532616618 - kind: conda name: tzcode - version: 2024a - build: h10d778d_0 + version: 2024b + build: h00291cd_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024a-h10d778d_0.conda - sha256: e3ee34b2711500f3b1d38309d47cfd7e4d05c0144f0b2b2bdfbc271a28cfdd76 - md5: 8d50ba6668dbd193cd42ccd9099fa2ae + url: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024b-h00291cd_0.conda + sha256: 3cce425fc4b1ab8ccd57e1591334b1ef37c11af108620c283d09902bfb78ada8 + md5: 146c172e6c1e704f8ba8a57a693da033 + depends: + - __osx >=10.13 license: BSD-3-Clause license_family: BSD purls: [] - size: 63341 - timestamp: 1706869081062 + size: 62685 + timestamp: 1725600484536 - kind: conda name: tzcode - version: 2024a - build: h3f72095_0 + version: 2024b + build: hb9d3cd8_0 
subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024a-h3f72095_0.conda - sha256: d3ea2927cabd6c9f27ee0cb498f893ac0133687d6a9e65e0bce4861c732a18df - md5: 32146e34aaec3745a08b6f49af3f41b0 + url: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda + sha256: 20c72e7ba106338d51fdc29a717a54fcd52340063232e944dcd1d38fb6348a28 + md5: db124840386e1f842f93372897d1b857 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=13 license: BSD-3-Clause license_family: BSD purls: [] - size: 69821 - timestamp: 1706868851630 + size: 69349 + timestamp: 1725600364789 - kind: conda name: tzdata version: 2024a - build: h0c530f3_0 + build: h8827d51_1 + build_number: 1 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - sha256: 7b2b69c54ec62a243eb6fba2391b5e443421608c3ae5dbff938ad33ca8db5122 - md5: 161081fc7cec0bfda0d86d7cb595f8d8 + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + sha256: 7d21c95f61319dba9209ca17d1935e6128af4235a67ee4e57a00908a1450081e + md5: 8bfdead4e0fff0383ae4c9c50d0531bd license: LicenseRef-Public-Domain purls: [] - size: 119815 - timestamp: 1706886945727 + size: 124164 + timestamp: 1724736371498 - kind: conda name: ucrt version: 10.0.22621.0 @@ -18151,66 +18384,68 @@ packages: - kind: conda name: ukkonen version: 1.0.1 - build: py312h0d7def4_4 - build_number: 4 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312h0d7def4_4.conda - sha256: f5f7550991ca647f69b67b9188c7104a3456122611dd6a6e753cff555e45dfd9 - md5: 57cfbb8ce3a1800bd343bf6afba6f878 + build: py312h68727a3_5 + build_number: 5 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda + sha256: 9fb020083a7f4fee41f6ece0f4840f59739b3e249f157c8a407bb374ffb733b5 + md5: f9664ee31aed96c85b7319ab0a693341 depends: + - __glibc >=2.17,<3.0.a0 - cffi - - python 
>=3.12.0rc3,<3.13.0a0 + - libgcc >=13 + - libstdcxx >=13 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/ukkonen?source=hash-mapping - size: 17235 - timestamp: 1695549871621 + size: 13904 + timestamp: 1725784191021 - kind: conda name: ukkonen version: 1.0.1 - build: py312h49ebfd2_4 - build_number: 4 + build: py312hc5c4d5f_5 + build_number: 5 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312h49ebfd2_4.conda - sha256: efca19a5e73e4aacfc5e90a5389272b2508e41dc4adab9eb5353c5200ba37041 - md5: 4e6b5a8025cd8fd97b3cfe103ffce6b1 + url: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312hc5c4d5f_5.conda + sha256: f6433143294c1ca52410bf8bbca6029a04f2061588d32e6d2b67c7fd886bc4e0 + md5: f270aa502d8817e9cb3eb33541f78418 depends: + - __osx >=10.13 - cffi - - libcxx >=15.0.7 - - python >=3.12.0rc3,<3.13.0a0 + - libcxx >=17 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - pkg:pypi/ukkonen?source=hash-mapping - size: 13246 - timestamp: 1695549689363 + size: 13031 + timestamp: 1725784199719 - kind: conda name: ukkonen version: 1.0.1 - build: py312h8572e83_4 - build_number: 4 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h8572e83_4.conda - sha256: f9a4384d466f4d8b5b497d951329dd4407ebe02f8f93456434e9ab789d6e23ce - md5: 52c9e25ee0a32485a102eeecdb7eef52 + build: py312hd5eb7cc_5 + build_number: 5 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312hd5eb7cc_5.conda + sha256: f1944f3d9645a6fa2770966ff010791136e7ce0eaa0c751822b812ac04fee7d6 + md5: d8c5ef1991a5121de95ea8e44c34e13a depends: - cffi - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime 
>=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/ukkonen?source=hash-mapping - size: 14050 - timestamp: 1695549556745 + size: 17213 + timestamp: 1725784449622 - kind: conda name: uri-template version: 1.3.0 @@ -18302,12 +18537,12 @@ packages: - kind: conda name: vc version: '14.3' - build: h8a93ad2_20 - build_number: 20 + build: h8a93ad2_21 + build_number: 21 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda - sha256: 23ac5feb15a9adf3ab2b8c4dcd63650f8b7ae860c5ceb073e49cf71d203eddef - md5: 8558f367e1d7700554f7cdb823c46faf + url: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_21.conda + sha256: f14f5238c2e2516e292af43d91df88f212d769b4853eb46d03291793dcf00da9 + md5: e632a9b865d4b653aa656c9fb4f4817c depends: - vc14_runtime >=14.40.33810 track_features: @@ -18315,35 +18550,35 @@ packages: license: BSD-3-Clause license_family: BSD purls: [] - size: 17391 - timestamp: 1717709040616 + size: 17243 + timestamp: 1725984095174 - kind: conda name: vc14_runtime version: 14.40.33810 - build: ha82c5b3_20 - build_number: 20 + build: ha82c5b3_21 + build_number: 21 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_20.conda - sha256: af3cfa347e3d7c1277e9b964b0849a9a9f095bff61836cb3c3a89862fbc32e17 - md5: e39cc4c34c53654ec939558993d9dc5b + url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_21.conda + sha256: c3bf51bff7db39ad7e890dbef1b1026df0af36975aea24dea7c5fe1e0b382c40 + md5: b3ebb670caf046e32b835fbda056c4f9 depends: - ucrt >=10.0.20348.0 constrains: - - vs2015_runtime 14.40.33810.* *_20 + - vs2015_runtime 14.40.33810.* *_21 license: LicenseRef-ProprietaryMicrosoft license_family: Proprietary purls: [] - size: 751934 - timestamp: 1717709031266 + size: 751757 + timestamp: 1725984166774 - kind: conda name: virtualenv - version: 20.26.3 + version: 20.26.4 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: 
https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.3-pyhd8ed1ab_0.conda - sha256: f78961b194e33eed5fdccb668774651ec9423a043069fa7a4e3e2f853b08aa0c - md5: 284008712816c64c85bf2b7fa9f3b264 + url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda + sha256: 6eeb4f9e541f2e5198185c44ab4f5a2bdf700ca395b18617e12a8e00cf176d05 + md5: 14c15fa7def506fe7d1a0e3abdc212d6 depends: - distlib <1,>=0.3.7 - filelock <4,>=3.12.2 @@ -18353,32 +18588,32 @@ packages: license_family: MIT purls: - pkg:pypi/virtualenv?source=hash-mapping - size: 4363507 - timestamp: 1719150878323 + size: 4886907 + timestamp: 1725779361477 - kind: conda name: vs2015_runtime version: 14.40.33810 - build: h3bf8584_20 - build_number: 20 + build: h3bf8584_21 + build_number: 21 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda - sha256: 0c2803f7a788c51f28235a7228dc2ab3f107b4b16ab0845a3e595c8c51e50a7a - md5: c21f1b4a3a30bbc3ef35a50957578e0e + url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_21.conda + sha256: 472410455c381e406ec8c1d3e0342b48ee23122ef7ffb22a09d9763ca5df4d20 + md5: b3f37db7b7ae1c22600fa26a63ed99b3 depends: - vc14_runtime >=14.40.33810 license: BSD-3-Clause license_family: BSD purls: [] - size: 17395 - timestamp: 1717709043353 + size: 17241 + timestamp: 1725984096440 - kind: conda name: watchdog - version: 4.0.1 + version: 5.0.2 build: py312h2e8e312_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/watchdog-4.0.1-py312h2e8e312_0.conda - sha256: 35c657fd70de86e69dd8fcb04697df660da79410b4098a263acab55d363117ef - md5: 29cbd97528b7f7ce91a59186e391c0db + url: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.2-py312h2e8e312_0.conda + sha256: 580d88e105f473d12d074a5c5367d2c6a59f9163b44cf1dea7e29bb18d70817c + md5: 15280b51a3c50c02fc2ac260ecf90454 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -18387,16 +18622,16 @@ 
packages: license_family: APACHE purls: - pkg:pypi/watchdog?source=hash-mapping - size: 162034 - timestamp: 1716562347718 + size: 166180 + timestamp: 1725821602917 - kind: conda name: watchdog - version: 4.0.1 + version: 5.0.2 build: py312h7900ff3_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/watchdog-4.0.1-py312h7900ff3_0.conda - sha256: c4786da0c938a65cea07e2bb3fe76dbeed6968c322994c66395176307cf78425 - md5: 7cc94a3b5e9698eecc2c39dbf7a173db + url: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.2-py312h7900ff3_0.conda + sha256: 1c99f785aa6740c9e9e566e766f5499d7b55c5a07d75a348ce955de3247fcf31 + md5: c478f125efbb575135a9280ebd4fa01c depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -18405,16 +18640,16 @@ packages: license_family: APACHE purls: - pkg:pypi/watchdog?source=hash-mapping - size: 136444 - timestamp: 1716561872155 + size: 140866 + timestamp: 1725821237546 - kind: conda name: watchdog - version: 4.0.1 - build: py312hbd25219_0 + version: 5.0.2 + build: py312hb553811_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/watchdog-4.0.1-py312hbd25219_0.conda - sha256: f20d599605b43e670d2f44a6ad76eb916e9d0980c70ac4df2bc527b3f005590a - md5: 7cecf3b27b6a0ba239695d2992a1a177 + url: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.2-py312hb553811_0.conda + sha256: fc2fbd8c54a20fa4b021b772a62da3af9d5e3d9a6b84e4b083a0ffbd08ed9025 + md5: fd6318dc9fc957ec9145b49dd9c31b79 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -18424,26 +18659,27 @@ packages: license_family: APACHE purls: - pkg:pypi/watchdog?source=hash-mapping - size: 144881 - timestamp: 1716561920161 + size: 149711 + timestamp: 1725821254325 - kind: conda name: wayland - version: 1.23.0 - build: h5291e77_0 + version: 1.23.1 + build: h3e06ad9_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.0-h5291e77_0.conda - sha256: 5f2572290dd09d5480abe6e0d9635c17031a12fd4e68578680e9f49444d6dd8b - 
md5: c13ca0abd5d1d31d0eebcf86d51da8a4 + url: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda + sha256: 0884b2023a32d2620192cf2e2fc6784b8d1e31cf9f137e49e00802d4daf7d1c1 + md5: 0a732427643ae5e0486a727927791da1 depends: + - __glibc >=2.17,<3.0.a0 - libexpat >=2.6.2,<3.0a0 - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc-ng >=13 + - libstdcxx-ng >=13 license: MIT license_family: MIT purls: [] - size: 322846 - timestamp: 1717119371478 + size: 321561 + timestamp: 1724530461598 - kind: conda name: wcwidth version: 0.2.13 @@ -18475,7 +18711,7 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/webcolors?source=compressed-mapping + - pkg:pypi/webcolors?source=hash-mapping size: 18378 timestamp: 1723294800217 - kind: conda @@ -18527,7 +18763,7 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/wheel?source=compressed-mapping + - pkg:pypi/wheel?source=hash-mapping size: 58585 timestamp: 1722797131787 - kind: conda @@ -18566,99 +18802,103 @@ packages: - kind: conda name: wrapt version: 1.16.0 - build: py312h41838bb_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312h41838bb_0.conda - sha256: 9ed208c4c844c50f161764df7ed7a226c42822917c892ab7c8f67eec6ca96dff - md5: d87798aa7210da2c5eaf96c0346dca00 + build: py312h4389bb4_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312h4389bb4_1.conda + sha256: b136f99c616ef39243139929588030ba7fb48a3e518265513206cff405c3e5f4 + md5: d6f56554649b5cc8fff12efb657ea797 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-2-Clause license_family: BSD purls: - pkg:pypi/wrapt?source=hash-mapping - size: 59057 - timestamp: 1699533259706 + size: 60856 + timestamp: 1724958453066 - kind: conda name: wrapt version: 1.16.0 - build: py312h98912ed_0 + build: py312h66e93f0_1 + 
build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h98912ed_0.conda - sha256: dc8431b343961347ad93b33d2d8270e8c15d8825382f4f2540835c94aba2de05 - md5: fa957a1c7bee7e47ad44633caf7be8bc + url: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h66e93f0_1.conda + sha256: 3a15a399eb61a999f0f14b4d243acc14e2dff1ead92ef52fcff30c84be89b21c + md5: 2eebcffe80e2a7bb2f0a77e621a7f124 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-2-Clause license_family: BSD purls: - pkg:pypi/wrapt?source=hash-mapping - size: 62482 - timestamp: 1699532968076 + size: 62624 + timestamp: 1724958046744 - kind: conda name: wrapt version: 1.16.0 - build: py312he70551f_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312he70551f_0.conda - sha256: e4b5ac6c897e68a798dfe13a1499dc9b555c48b468aa477d456807f2a7366c30 - md5: cea7b1aa961de6a8ac90584b5968a01d + build: py312hb553811_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312hb553811_1.conda + sha256: 31e3fdd934cdb8ac47764d1daf13edddd7844a17b229e8ef438fc16e06576b00 + md5: 6385725172f8712d54b5af6b86bf8236 depends: + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-2-Clause license_family: BSD purls: - pkg:pypi/wrapt?source=hash-mapping - size: 61358 - timestamp: 1699533495284 + size: 58290 + timestamp: 1724958107514 - kind: conda name: xarray - version: 2024.7.0 + version: 2024.9.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda - sha256: c8a0c70bb3402b29a9eebb1e41c5d28e9215bb14abea0c986d4d89026aa1ce42 - md5: a7d4ff4bf1502eaba3fbbaeba66969ec + url: 
https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda + sha256: 416f009d6513f73ca2c02fbb65f626c1730b534741a752e74c9b2cd7b1f57edf + md5: 2cde8ed028a0fd8f35d7f9b44839d362 depends: - - numpy >=1.23 + - numpy >=1.24 - packaging >=23.1 - - pandas >=2.0 - - python >=3.9 + - pandas >=2.1 + - python >=3.10 constrains: - - dask-core >=2023.4 - - hdf5 >=1.12 - - bottleneck >=1.3 - - numba >=0.56 + - dask-core >=2023.9 + - flox >=0.7 + - numba >=0.57 - h5py >=3.8 - - h5netcdf >=1.1 - - iris >=3.4 + - hdf5 >=1.12 + - netcdf4 >=1.6.0 + - scipy >=1.11 + - zarr >=2.16 - sparse >=0.14 - - matplotlib-base >=3.7 - - toolz >=0.12 - - distributed >=2023.4 - - seaborn >=0.12 - - zarr >=2.14 - cftime >=1.6 + - iris >=3.7 + - seaborn >=0.12 + - distributed >=2023.9 + - matplotlib-base >=3.7 - pint >=0.22 - - netcdf4 >=1.6.0 - nc-time-axis >=1.4 - - scipy >=1.10 - - cartopy >=0.21 - - flox >=0.7 + - bottleneck >=1.3 + - h5netcdf >=1.2 + - cartopy >=0.22 + - toolz >=0.12 license: Apache-2.0 - license_family: APACHE purls: - pkg:pypi/xarray?source=compressed-mapping - size: 791540 - timestamp: 1722348308549 + size: 802366 + timestamp: 1726135055732 - kind: conda name: xcb-util version: 0.4.1 @@ -18678,15 +18918,15 @@ packages: timestamp: 1718843348208 - kind: conda name: xcb-util-cursor - version: 0.1.4 - build: h4ab18f5_2 - build_number: 2 + version: 0.1.5 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.4-h4ab18f5_2.conda - sha256: c72e58bae4a7972ca4dee5e850e82216222c06d53b3651e1ca7db8b5d2fc95fe - md5: 79e46d4a6ccecb7ee1912042958a8758 + url: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda + sha256: c7b35db96f6e32a9e5346f97adc968ef2f33948e3d7084295baebc0e33abdd5b + md5: eb44b3b6deb1cab08d72cb61686fe64c depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - libxcb >=1.13 - libxcb >=1.16,<1.17.0a0 - xcb-util-image >=0.4.0,<0.5.0a0 @@ -18694,8 
+18934,8 @@ packages: license: MIT license_family: MIT purls: [] - size: 20397 - timestamp: 1718899451268 + size: 20296 + timestamp: 1726125844850 - kind: conda name: xcb-util-image version: 0.4.0 @@ -18836,6 +19076,39 @@ packages: purls: [] size: 388998 timestamp: 1717817668629 +- kind: conda + name: xorg-fixesproto + version: '5.0' + build: h7f98852_1002 + build_number: 1002 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2 + sha256: 5d2af1b40f82128221bace9466565eca87c97726bb80bbfcd03871813f3e1876 + md5: 65ad6e1eb4aed2b0611855aff05e04f6 + depends: + - libgcc-ng >=9.3.0 + - xorg-xextproto + license: MIT + license_family: MIT + purls: [] + size: 9122 + timestamp: 1617479697350 +- kind: conda + name: xorg-inputproto + version: 2.3.2 + build: h7f98852_1002 + build_number: 1002 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2 + sha256: 6c8c2803de0f643f8bad16ece3f9a7259e4a49247543239c182d66d5e3a129a7 + md5: bcd1b3396ec6960cbc1d2855a9e60b2b + depends: + - libgcc-ng >=9.3.0 + license: MIT + license_family: MIT + purls: [] + size: 19602 + timestamp: 1610027678228 - kind: conda name: xorg-kbproto version: 1.0.7 @@ -19009,6 +19282,47 @@ packages: purls: [] size: 50143 timestamp: 1677036907815 +- kind: conda + name: xorg-libxfixes + version: 5.0.3 + build: h7f98852_1004 + build_number: 1004 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2 + sha256: 1e426a1abb774ef1dcf741945ed5c42ad12ea2dc7aeed7682d293879c3e1e4c3 + md5: e9a21aa4d5e3e5f1aed71e8cefd46b6a + depends: + - libgcc-ng >=9.3.0 + - xorg-fixesproto + - xorg-libx11 >=1.7.0,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 18145 + timestamp: 1617717802636 +- kind: conda + name: xorg-libxi + version: 1.7.10 + build: h4bc722e_1 + build_number: 1 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda + sha256: e1416eb435e3d903bc658e3c637f0e87efd2dca290fe70daf29738b3a3d1f8ff + md5: 749baebe7e2ff3360630e069175e528b + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + - xorg-inputproto + - xorg-libx11 >=1.8.9,<2.0a0 + - xorg-libxext 1.3.* + - xorg-libxext >=1.3.4,<2.0a0 + - xorg-libxfixes 5.0.* + - xorg-xextproto >=7.3.0,<8.0a0 + license: MIT + license_family: MIT + purls: [] + size: 46794 + timestamp: 1722108216651 - kind: conda name: xorg-libxrender version: 0.9.11 @@ -19026,6 +19340,65 @@ packages: purls: [] size: 37770 timestamp: 1688300707994 +- kind: conda + name: xorg-libxtst + version: 1.2.5 + build: h4bc722e_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-h4bc722e_0.conda + sha256: 0139b52c3cbce57bfd1d120c41637bc239430faff4aa0445f58de0adf4c4b976 + md5: 185159d666308204eca00295599b0a5c + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + - xorg-inputproto + - xorg-libx11 >=1.8.9,<2.0a0 + - xorg-libxext 1.3.* + - xorg-libxext >=1.3.4,<2.0a0 + - xorg-libxi 1.7.* + - xorg-libxi >=1.7.10,<2.0a0 + - xorg-recordproto + license: MIT + license_family: MIT + purls: [] + size: 32931 + timestamp: 1722575571554 +- kind: conda + name: xorg-libxxf86vm + version: 1.1.5 + build: h4bc722e_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-h4bc722e_1.conda + sha256: 109d6b1931d1482faa0bf6de83c7e6d9ca36bbf9d36a00a05df4f63b82fce5c3 + md5: 0c90ad87101001080484b91bd9d2cdef + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + - xorg-libx11 >=1.8.9,<2.0a0 + - xorg-libxext >=1.3.4,<2.0a0 + - xorg-xextproto >=7.3.0,<8.0a0 + license: MIT + license_family: MIT + purls: [] + size: 18443 + timestamp: 1722110433983 +- kind: conda + name: xorg-recordproto + version: 1.14.2 + build: h7f98852_1002 + build_number: 1002 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-recordproto-1.14.2-h7f98852_1002.tar.bz2 + sha256: 4b91d48fed368c83eafd03891ebfd5bae0a03adc087ebea8a680ae22da99a85f + md5: 2f835e6c386e73c6faaddfe9eda67e98 + depends: + - libgcc-ng >=9.3.0 + license: MIT + license_family: MIT + purls: [] + size: 8014 + timestamp: 1621340029114 - kind: conda name: xorg-renderproto version: 0.11.1 @@ -19076,13 +19449,13 @@ packages: timestamp: 1607291557628 - kind: conda name: xugrid - version: 0.11.0 + version: 0.12.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.11.0-pyhd8ed1ab_0.conda - sha256: 1f4bef6a7edb21c173c6b69788a42a46e350275664a79de14dc9efcdb9460627 - md5: 1d2fe2eccb1568bee8641cdf359ff742 + url: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda + sha256: 0ebb18754772cd8d17a2628715eab89ffc9848b98347216750a1cc141f966b0f + md5: fc0bdab291560b19d2be5f88e223e891 depends: - dask - geopandas @@ -19098,26 +19471,26 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/xugrid?source=compressed-mapping - size: 94836 - timestamp: 1722866212176 + - pkg:pypi/xugrid?source=hash-mapping + size: 100533 + timestamp: 1725906846106 - kind: conda name: xyzservices - version: 2024.6.0 + version: 2024.9.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.6.0-pyhd8ed1ab_0.conda - sha256: da2e54cb68776e62a708cb6d5f026229d8405ff4cfd8a2446f7d386f07ebc5c1 - md5: de631703d59e40af41c56c4b4e2928ab + url: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda + sha256: 2dd2825b5a246461a95a0affaf7e1d459f7cc0ae68ad2dd8aab360c2e5859488 + md5: 156c91e778c1d4d57b709f8c5333fd06 depends: - python >=3.8 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/xyzservices?source=hash-mapping - size: 46663 - timestamp: 1717752234053 + size: 46887 + timestamp: 1725366457240 - kind: conda name: xz 
version: 5.2.6 @@ -19209,61 +19582,62 @@ packages: - kind: conda name: zeromq version: 4.3.5 - build: h75354e8_4 - build_number: 4 + build: ha4adb4c_5 + build_number: 5 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h75354e8_4.conda - sha256: bc9aaee39e7be107d7daff237435dfd8f791aca460a98583a36a263615205262 - md5: 03cc8d9838ad9dd0060ab532e81ccb21 + url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda + sha256: dd48adc07fcd029c86fbf82e68d0e4818c7744b768e08139379920b56b582814 + md5: e8372041ebb377237db9d0d24c7b5962 depends: - - krb5 >=1.21.2,<1.22.0a0 - - libgcc-ng >=12 - - libsodium >=1.0.18,<1.0.19.0a0 - - libstdcxx-ng >=12 + - __glibc >=2.17,<3.0.a0 + - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=13 + - libsodium >=1.0.20,<1.0.21.0a0 + - libstdcxx >=13 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 353229 - timestamp: 1715607188837 + size: 353159 + timestamp: 1725429777124 - kind: conda name: zeromq version: 4.3.5 - build: hde137ed_4 - build_number: 4 + build: hb33e954_5 + build_number: 5 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hde137ed_4.conda - sha256: 871625ce993e6c61649b14659a3d1d6011fbb242b7d6a25cadbc6300b2356f32 - md5: e56609055da6c658aa329d42a6c6b9f2 + url: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hb33e954_5.conda + sha256: 7e63a9ec19660666095ea9332a5b226329ff4f499018e8a281d0d160cbb60ca4 + md5: a9735eb372d515c78f8211785406e36f depends: - __osx >=10.13 - - krb5 >=1.21.2,<1.22.0a0 - - libcxx >=16 - - libsodium >=1.0.18,<1.0.19.0a0 + - krb5 >=1.21.3,<1.22.0a0 + - libcxx >=17 + - libsodium >=1.0.20,<1.0.21.0a0 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 304498 - timestamp: 1715607961981 + size: 303596 + timestamp: 1725430161260 - kind: conda name: zeromq version: 4.3.5 - build: he1f189c_4 - build_number: 4 + build: he1f189c_5 + build_number: 5 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_4.conda - sha256: 0f375034a88659f764ce837f324698a883da227fcb517561ffaf6a89474211b4 - md5: b755eb545c2728b9a53729f02e627834 + url: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_5.conda + sha256: 7cfea95cc9f637ad5b651cde6bb22ddcd7989bd9b21e3c6df4958f618c13b807 + md5: a6df1c5da1f16f02e872994611dc4dfb depends: - - krb5 >=1.21.2,<1.22.0a0 - - libsodium >=1.0.18,<1.0.19.0a0 + - krb5 >=1.21.3,<1.22.0a0 + - libsodium >=1.0.20,<1.0.21.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 2707065 - timestamp: 1715607874610 + size: 2710711 + timestamp: 1725430044838 - kind: conda name: zict version: 3.0.0 @@ -19283,21 +19657,21 @@ packages: timestamp: 1681770298596 - kind: conda name: zipp - version: 3.20.0 + version: 3.20.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.0-pyhd8ed1ab_0.conda - sha256: 72fa72af24006e37a9f027d6d9f407369edcbd9bbb93db299820eb63ea07e404 - md5: 05b6bcb391b5be17374f7ad0aeedc479 + url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda + sha256: 30762bd25b6fc8714d5520a223ccf20ad4a6792dc439c54b59bf44b60bf51e72 + md5: 74a4befb4b38897e19a107693e49da20 depends: - python >=3.8 license: MIT license_family: MIT purls: - - pkg:pypi/zipp?source=compressed-mapping - size: 20857 - timestamp: 1723591347715 + - pkg:pypi/zipp?source=hash-mapping + size: 21110 + timestamp: 1724731063145 - kind: conda name: zlib version: 1.3.1 @@ -19354,11 +19728,12 @@ packages: - kind: conda name: zstandard version: 0.23.0 - build: py312h331e495_0 + build: py312h7122b0e_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h331e495_0.conda - sha256: c1d379d1062f23e3fbd3dd8548fc6cf61b23d6f96b11e78c4e01f4761580cb02 - md5: fb62d40e45f51f7d6a7df47c9a12caf4 + url: 
https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h7122b0e_1.conda + sha256: 2685dde42478fae0780fba5d1f8a06896a676ae105f215d32c9f9e76f3c6d8fd + md5: bd132ba98f3fc0a6067f355f8efe4cb6 depends: - __osx >=10.13 - cffi >=1.11 @@ -19370,53 +19745,55 @@ packages: license_family: BSD purls: - pkg:pypi/zstandard?source=hash-mapping - size: 411066 - timestamp: 1721044218542 + size: 410873 + timestamp: 1725305688706 - kind: conda name: zstandard version: 0.23.0 - build: py312h3483029_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312h3483029_0.conda - sha256: 7e1e105ea7eab2af591faebf743ff2493f53c313079e316419577925e4492b03 - md5: eab52e88c858d87cf5a069f79d10bb50 + build: py312h7606c53_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_1.conda + sha256: 3e0c718aa18dcac7f080844dbe0aea41a9cea75083019ce02e8a784926239826 + md5: a92cc3435b2fd6f51463f5a4db5c50b1 depends: - - __glibc >=2.17,<3.0.a0 - cffi >=1.11 - - libgcc-ng >=12 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.5.7.0a0 - zstd >=1.5.6,<1.6.0a0 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/zstandard?source=hash-mapping - size: 416708 - timestamp: 1721044154409 + size: 320624 + timestamp: 1725305934189 - kind: conda name: zstandard version: 0.23.0 - build: py312h7606c53_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_0.conda - sha256: 907edf473419a5aff6151900d09bb3f2b2c2ede8964f20ae87cb6fae04d0cbb7 - md5: c405924e081cb476495ffe72c88e92c2 + build: py312hef9b889_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda + sha256: b97015e146437283f2213ff0e95abdc8e2480150634d81fbae6b96ee09f5e50b + md5: 8b7069e9792ee4e5b4919a7a306d2e67 depends: + - __glibc 
>=2.17,<3.0.a0 - cffi >=1.11 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.5.7.0a0 - zstd >=1.5.6,<1.6.0a0 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/zstandard?source=hash-mapping - size: 320649 - timestamp: 1721044547910 + size: 419552 + timestamp: 1725305670210 - kind: conda name: zstd version: 1.5.6 From 87f36234e5b362eeaa3efaa58b98454fc92e7920 Mon Sep 17 00:00:00 2001 From: Maarten Pronk Date: Mon, 23 Sep 2024 11:38:02 +0200 Subject: [PATCH 04/23] Try latest pixi (#156) to see whether they solved our lock file being out of date, which was worked around in #144 (this reverts that). --- .github/workflows/docs.yml | 1 - .github/workflows/python_lint.yml | 1 - .github/workflows/python_tests.yml | 1 - 3 files changed, 3 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 780b804..ef3c02a 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -31,7 +31,6 @@ jobs: with: manifest-path: Ribasim-NL/pixi.toml pixi-version: "latest" - frozen: true - name: Check Quarto installation and all engines working-directory: Ribasim-NL diff --git a/.github/workflows/python_lint.yml b/.github/workflows/python_lint.yml index 54016f5..4fbe4cd 100644 --- a/.github/workflows/python_lint.yml +++ b/.github/workflows/python_lint.yml @@ -31,7 +31,6 @@ jobs: with: manifest-path: Ribasim-NL/pixi.toml pixi-version: "latest" - frozen: true - name: Run mypy on hydamo working-directory: Ribasim-NL diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml index d1a0b23..708bd8a 100644 --- a/.github/workflows/python_tests.yml +++ b/.github/workflows/python_tests.yml @@ -37,7 +37,6 @@ jobs: with: manifest-path: Ribasim-NL/pixi.toml pixi-version: "latest" - frozen: true - name: Run tests working-directory: Ribasim-NL From a379f22c8b4103f7c29e682888124b56dcfd3bff Mon Sep 17 00:00:00 2001 
From: Daniel Tollenaar Date: Fri, 27 Sep 2024 13:11:18 +0200 Subject: [PATCH 05/23] De Dommel met bergend gebied (#158) Een model van De Dommel met bergingsknopen volgens methode Vd Gaast. Resulteert in model DeDommel versie 2024.9.0, beschikbaar via: https://deltares.thegood.cloud/f/119263 fixes/updates: - #102 - #157 - #97 --- notebooks/de_dommel/01_fix_model_network.py | 93 ++++++- notebooks/de_dommel/03_fix_basin_area.py | 11 +- notebooks/de_dommel/04_parameterize_model.py | 2 + notebooks/de_dommel/05_add_berging.py | 273 +++++++++++++++++++ notebooks/de_dommel/99_upload_model.py | 6 + src/ribasim_nl/ribasim_nl/geometry.py | 7 +- src/ribasim_nl/ribasim_nl/model.py | 4 + 7 files changed, 384 insertions(+), 12 deletions(-) create mode 100644 notebooks/de_dommel/05_add_berging.py create mode 100644 notebooks/de_dommel/99_upload_model.py diff --git a/notebooks/de_dommel/01_fix_model_network.py b/notebooks/de_dommel/01_fix_model_network.py index 1c87870..d2ff224 100644 --- a/notebooks/de_dommel/01_fix_model_network.py +++ b/notebooks/de_dommel/01_fix_model_network.py @@ -4,7 +4,7 @@ from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet from ribasim_nl import CloudStorage, Model, NetworkValidator -from shapely.geometry import Point +from shapely.geometry import Point, Polygon cloud = CloudStorage() @@ -117,13 +117,13 @@ model.level_boundary.add(boundary_node, [level_data]) model.edge.add(model.tabulated_rating_curve[614], model.level_boundary[28]) - +# %% # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2292014475 model.remove_node(node_id=1898, remove_edges=True) # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2292017813 -for node_id in [1891, 989, 1058]: - model.remove_node(node_id, remove_edges=True) +model.update_node(989, "Outlet", [outlet.Static(flow_rate=[0])]) +model.update_node(1891, "LevelBoundary", [level_data]) # see: 
https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291988317 # for from_node_id, to_node_id in [799, 1580, 625, 1123, 597, 978]: @@ -156,11 +156,49 @@ if not network_validator.node_internal_basin().empty: raise Exception("nog steeds interne basins") + +# see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2367724440 +gdf = gpd.read_file( + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg"), + layer="stuw", + fid_as_index=True, +) +kst = gdf.loc[35] +geometry = Point(kst.geometry.x, kst.geometry.y) +name = kst.CODE +meta_object_type = "stuw" + +outlet_node_id = model.next_node_id + +kst_node = model.outlet.add( + Node(node_id=outlet_node_id, geometry=geometry, name=name, meta_object_type=meta_object_type), + [outlet_data], +) + + +gdf = gpd.read_file( + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg"), + layer="hydroobject", + engine="pyogrio", + fid_as_index=True, +) +geometry = gdf.at[2822, "geometry"].interpolate(0.5, normalized=True) +basin_node_id = model.next_node_id +basin_node = model.basin.add( + Node(node_id=basin_node_id, geometry=geometry, meta_krw_name="Witte Loop/Peelrijt", meta_krw_id="NL27_KD_3_2"), + basin_data, +) + +model.remove_edge(from_node_id=664, to_node_id=8, remove_disconnected_nodes=False) +model.edge.add(model.manning_resistance[664], basin_node) +model.edge.add(basin_node, kst_node) +model.edge.add(kst_node, model.level_boundary[8]) + +# see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2293486609 df = network_validator.edge_incorrect_type_connectivity( from_node_type="ManningResistance", to_node_type="LevelBoundary" ) -# see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2293486609 for node_id in df.from_node_id: model.update_node(node_id, "Outlet", [outlet.Static(flow_rate=[100])]) @@ -174,9 +212,50 @@ model.basin.area.df = model.basin.area.df[~model.basin.area.df.node_id.isin(model.unassigned_basin_area.node_id)] -# # %% write model + +# 
fix basin area + +# see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2370880128 +basin_polygon = model.basin.area.df.union_all() +holes = [Polygon(interior) for polygon in basin_polygon.buffer(10).buffer(-10).geoms for interior in polygon.interiors] +geoseries = gpd.GeoSeries(holes, crs=28992) + +drainage_areas_df = gpd.read_file( + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "areas.gpkg"), layer="drainage_areas" +) + +drainage_areas_df = drainage_areas_df[drainage_areas_df.buffer(-10).intersects(basin_polygon)] + +for idx, geometry in enumerate(geoseries): + # select drainage-area + drainage_area_select = drainage_areas_df[drainage_areas_df.contains(geometry.buffer(-10))] + if not drainage_area_select.empty: + if not len(drainage_area_select) == 1: + raise ValueError("hole contained by multiple drainage areas, can't fix that yet") + + drainage_area = drainage_area_select.iloc[0].geometry + + # find basin_id to merge to + selected_basins_df = model.basin.area.df[model.basin.area.df.buffer(-10).within(drainage_area)].set_index( + "node_id" + ) + intersecting_basins_df = selected_basins_df.intersection(geometry.buffer(10)) + assigned_basin_id = selected_basins_df.intersection(geometry.buffer(10)).area.idxmax() + + # clip and merge geometry + geometry = geometry.buffer(10).difference(basin_polygon) + geometry = ( + model.basin.area.df.set_index("node_id") + .at[assigned_basin_id, "geometry"] + .union(geometry) + .buffer(0.1) + .buffer(-0.1) + ) + model.basin.area.df.loc[model.basin.area.df.node_id == assigned_basin_id, "geometry"] = geometry + +# %% write model model.edge.df.reset_index(drop=True, inplace=True) model.edge.df.index.name = "edge_id" -ribasim_toml = ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_model_network", "model.toml") +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_model_network", "model.toml") model.write(ribasim_toml) diff --git a/notebooks/de_dommel/03_fix_basin_area.py 
b/notebooks/de_dommel/03_fix_basin_area.py index 7e8dd0c..50a1636 100644 --- a/notebooks/de_dommel/03_fix_basin_area.py +++ b/notebooks/de_dommel/03_fix_basin_area.py @@ -23,13 +23,14 @@ dissolved_area_gdf.to_file(cloud.joinpath("DeDommel", "verwerkt", "water_area.gpkg")) # %% -basin_df = model.basin.node.df -basin_area_df = gpd.read_file( - cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg"), engine="pyogrio", fid_as_index=True -) +basin_area_gpkg = cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg") +basin_area_df = model.basin.area.df +basin_area_df.to_file(basin_area_gpkg) basin_area_df.set_index("node_id", inplace=True) +basin_df = model.basin.node.df +# %% data = [] ignore_basins = [1278, 1228, 1877, 1030] row = next(i for i in basin_df.itertuples() if i.Index == 1230) @@ -76,6 +77,8 @@ area_df = gpd.GeoDataFrame(data, crs=model.basin.node.df.crs) area_df = area_df[~area_df.is_empty] area_df.index.name = "fid" +mask = area_df.geometry.type == "Polygon" +area_df.loc[mask, "geometry"] = area_df.geometry[mask].apply(lambda x: MultiPolygon([x])) model.basin.area.df = area_df # %% ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_areas", "model.toml") diff --git a/notebooks/de_dommel/04_parameterize_model.py b/notebooks/de_dommel/04_parameterize_model.py index 82675ae..2071205 100644 --- a/notebooks/de_dommel/04_parameterize_model.py +++ b/notebooks/de_dommel/04_parameterize_model.py @@ -357,6 +357,8 @@ def get_area_and_profile(node_id): flow_rate = round( basin_area_df.at[model.upstream_node_id(node_id), "geometry"].area * 0.015 / 86400, 2 ) # 15mm/day of upstream areay + else: + flow_rate = flow_rate / 60 # m3/min -> m3/s data = pump.Static(flow_rate=[flow_rate], min_upstream_level=min_upstream_level) diff --git a/notebooks/de_dommel/05_add_berging.py b/notebooks/de_dommel/05_add_berging.py new file mode 100644 index 0000000..ea32582 --- /dev/null +++ b/notebooks/de_dommel/05_add_berging.py @@ -0,0 +1,273 @@ +# %% + +import 
geopandas as gpd +import numpy as np +import numpy.typing as npt +import pandas as pd +import rasterio +from rasterio.windows import from_bounds +from rasterstats import zonal_stats +from ribasim import Node +from ribasim.nodes import basin, tabulated_rating_curve +from ribasim_nl import CloudStorage, Model +from ribasim_nl.geometry import basin_to_point +from shapely.geometry import LineString + +cloud = CloudStorage() + +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_parameterized", "model.toml") +model = Model.read(ribasim_toml) + +banden = { + "maaiveld": 1, + "bodemhoogte_primair_winter": 2, + "bodemhoogte_primair_zomer": 3, + "bodemhoogte_secundair_winter": 4, + "bodemhoogte_secundair_zomer": 5, + "bodemhoogte_tertiair_winter": 6, + "bodemhoogte_tertiair_zomer": 7, + "ghg_2010-2019": 8, + "glg_2010-2019": 9, + "opp_primair": 10, + "opp_secundair": 11, + "opp_tertiair": 12, +} + +basin_area_df = gpd.read_file( + cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg"), engine="pyogrio", fid_as_index=True +) +basin_area_df.set_index("node_id", inplace=True) + +lhm_rasters = cloud.joinpath("Basisgegevens", "LHM", "4.3", "input", "LHM_data.tif") +ma_raster = cloud.joinpath("Basisgegevens", "VanDerGaast_QH", "spafvoer1.tif") + + +def sample_raster( + raster_file, + df, + band=1, + fill_value: float | None = None, + all_touched=False, + stats="mean", + maaiveld_data: npt.ArrayLike | None = None, +): + with rasterio.open(raster_file) as raster_src: + # read band + data = raster_src.read(band) + + if maaiveld_data is not None: + data = maaiveld_data - data + + # fill nodata + if fill_value is not None: + data = np.where(data == raster_src.nodata, fill_value, data) + + affine = raster_src.transform + + return zonal_stats(df, data, affine=affine, stats=stats, nodata=raster_src.nodata, all_touched=all_touched) + + +def get_rating_curve(row, min_level, maaiveld: None | float = None): + flow_rate = np.round([0, row.ma * 0.2, row.ma * 0.33, row.ma / 2, 
row.ma * 2], decimals=2) + depth = np.round([row.glg + 1, row.glg, row.ghg, row.ghg / 2, 0], decimals=2) + + # set GxG < 0 to 0 + depth[depth < 0] = 0 + + # level relative to maaiveld + if maaiveld is not None: + level = maaiveld - depth + else: + level = row.maaiveld - depth + + # make sure level >= min_level + level[level < min_level] = min_level + + # flow_rate in m3/s + flow_rate = flow_rate / 1000 * row.geometry.area / 86400 + + df = pd.DataFrame({"level": np.round(level, decimals=2), "flow_rate": np.round(flow_rate, decimals=5)}) + df.drop_duplicates("level", keep="first", inplace=True) + df.drop_duplicates("flow_rate", keep="last", inplace=True) + + return tabulated_rating_curve.Static(level=df.level, flow_rate=df.flow_rate) + + +def get_basin_profile(basin_polygon, polygon, max_level, min_level): + with rasterio.open(lhm_rasters) as src: + level = np.array([], dtype=float) + area = np.array([], dtype=float) + + # Get the window and its transform + window = from_bounds(*basin_polygon.bounds, transform=src.transform) + + if (window.width) < 1 or (window.height < 1): + window = from_bounds(*basin_polygon.centroid.buffer(125).bounds, transform=src.transform) + window_transform = src.window_transform(window) + + # Primary water bottom-level + window_data = src.read(3, window=window) + + # We don't want hoofdwater / doorgaand water to be in profile + if (polygon is None) | (window_data.size == 0): + mask = ~np.isnan(window_data) + else: + mask = rasterio.features.geometry_mask( + [polygon], window_data.shape, window_transform, all_touched=True, invert=True + ) + # Include nodata as False in mask + mask[np.isnan(window_data)] = False + + # add levels + level = np.concat([level, window_data[mask].ravel()]) + + # add areas on same mask + window_data = src.read(10, window=window) + area = np.concat([area, window_data[mask].ravel()]) + + # Secondary water + window_data = src.read(5, window=window) + mask = ~np.isnan(window_data) + level = np.concat([level, 
window_data[mask].ravel()]) + + window_data = src.read(11, window=window) + area = np.concat([area, window_data[mask].ravel()]) + + # Tertiary water water + window_data = src.read(7, window=window) + mask = ~np.isnan(window_data) + level = np.concat([level, window_data[mask].ravel()]) + + window_data = src.read(12, window=window) + area = np.concat([area, window_data[mask].ravel()]) + + # Make sure area is never larger than polygon-area + area[area > basin_polygon.area] = basin_polygon.area + + # If area is empty, we add min_level at 5% of polygon-area + if area.size == 0: + level = np.append(level, min_level) + area = np.append(area, basin_polygon.area * 0.05) + + # Add extra row with max_level at basin_polygon.area + level = np.append(level, max_level) + area = np.append(area, basin_polygon.area) + + # In pandas for magic + df = pd.DataFrame({"level": np.round(level, decimals=2), "area": np.round(area)}) + df.sort_values(by="level", inplace=True) + df = df.set_index("level").cumsum().reset_index() + df.dropna(inplace=True) + df.drop_duplicates("level", keep="last", inplace=True) + + # Return profile + return basin.Profile(area=df.area, level=df.level) + + +# %% add columns + +with rasterio.open(lhm_rasters) as raster_src: + # read band + maaiveld_data = raster_src.read(banden["maaiveld"]) + +# sample rasters +ghg = sample_raster( + raster_file=lhm_rasters, + df=basin_area_df, + band=banden["ghg_2010-2019"], + all_touched=True, + maaiveld_data=maaiveld_data, +) +basin_area_df.loc[:, ["ghg"]] = pd.Series(dtype=float) +basin_area_df.loc[:, ["ghg"]] = [i["mean"] for i in ghg] + +glg = sample_raster( + raster_file=lhm_rasters, + df=basin_area_df, + band=banden["glg_2010-2019"], + all_touched=True, + maaiveld_data=maaiveld_data, +) +basin_area_df.loc[:, ["glg"]] = pd.Series(dtype=float) +basin_area_df.loc[:, ["glg"]] = [i["mean"] for i in glg] + +ma = sample_raster(raster_file=ma_raster, df=basin_area_df, all_touched=True, fill_value=37) # 37mm/dag is 
+basin_area_df.loc[:, ["ma"]] = pd.Series(dtype=float) +basin_area_df.loc[:, ["ma"]] = [i["mean"] for i in ma] + +maaiveld = sample_raster( + raster_file=lhm_rasters, df=basin_area_df, band=banden["maaiveld"], stats="mean min max", all_touched=True +) +basin_area_df.loc[:, ["maaiveld"]] = pd.Series(dtype=float) +basin_area_df.loc[:, ["maaiveld"]] = [i["mean"] if pd.isna(i["mean"]) else i["mean"] for i in maaiveld] +basin_area_df.loc[:, ["maaiveld_max"]] = [i["max"] if pd.isna(i["max"]) else i["max"] for i in maaiveld] +basin_area_df.loc[:, ["maaiveld_min"]] = [i["min"] if pd.isna(i["min"]) else i["min"] for i in maaiveld] +# %%update model +edge_id = model.edge.df.index.max() + 1 +for row in model.basin.node.df.itertuples(): + # row = next(row for row in model.basin.node.df.itertuples() if row.Index == 1013) + node_id = row.Index + + if node_id in basin_area_df.index: + # basin-polygon + basin_row = basin_area_df.loc[node_id] + basin_polygon = basin_area_df.at[node_id, "geometry"] + + # add basin-node + basin_node_id = ( + model.next_node_id + ) # FIXME: can be removed if issue is closed https://github.com/Deltares/Ribasim/issues/1805 + geometry = basin_to_point(basin_polygon=basin_polygon, tolerance=10) + node = Node( + node_id=basin_node_id, + meta_categorie="bergend", + geometry=geometry, + ) + + if node_id in model.basin.area.df.node_id.to_list(): + polygon = model.basin.area.df.set_index("node_id").at[node_id, "geometry"] + else: + polygon = None + + max_level = max_level = basin_area_df.at[node_id, "maaiveld_max"] + min_level = max_level = basin_area_df.at[node_id, "maaiveld_min"] + if min_level == max_level: + min_level -= 0.1 + basin_profile = get_basin_profile(basin_polygon, polygon, max_level=max_level, min_level=min_level) + data = [ + basin_profile, + basin.State(level=[basin_profile.df.level.min() + 0.1]), + basin.Area(geometry=[basin_polygon]), + ] + basin_node = model.basin.add(node=node, tables=data) + + # get line + line = LineString([geometry, 
row.geometry]) + + # add tabulated rating curve + tbr_node_id = model.next_node_id + geometry = line.interpolate(0.5, normalized=True) + node = Node( + node_id=tbr_node_id, + meta_categorie="bergend", + geometry=geometry, + ) + if any(pd.isna(getattr(basin_row, i)) for i in ["ghg", "glg", "ma"]): + raise ValueError(f"No valid ghg, glg and/or ma for basin_id {node_id}") + else: + data = [get_rating_curve(row=basin_row, min_level=basin_profile.df.level.min())] + tbr_node = model.tabulated_rating_curve.add(node=node, tables=data) + + # add edges + edge_id += 1 # FIXME: can be removed if issue is closed https://github.com/Deltares/Ribasim/issues/1804 + model.edge.add(basin_node, tbr_node, edge_id=edge_id, meta_categorie="bergend") + edge_id += 1 + model.edge.add(tbr_node, model.basin[node_id], edge_id=edge_id, meta_categorie="bergend") + + else: + print(f"Geen basin-vlak voor {node_id}") + +# %% +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_bergend", "model.toml") + +model.write(ribasim_toml) diff --git a/notebooks/de_dommel/99_upload_model.py b/notebooks/de_dommel/99_upload_model.py new file mode 100644 index 0000000..c2d02fb --- /dev/null +++ b/notebooks/de_dommel/99_upload_model.py @@ -0,0 +1,6 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +cloud.upload_model("DeDommel", "DeDommel", include_results=True, include_plots=False) diff --git a/src/ribasim_nl/ribasim_nl/geometry.py b/src/ribasim_nl/ribasim_nl/geometry.py index 95de8fb..379333d 100644 --- a/src/ribasim_nl/ribasim_nl/geometry.py +++ b/src/ribasim_nl/ribasim_nl/geometry.py @@ -8,19 +8,24 @@ from ribasim_nl.generic import _validate_inputs -def basin_to_point(basin_polygon: Polygon | MultiPolygon) -> Point: +def basin_to_point(basin_polygon: Polygon | MultiPolygon, tolerance: None | float = None) -> Point: """Return a representative point for the basin; centroid if it is within (Multi)Polygon or polylabel if not. 
Parameters ---------- basin_polygon : Polygon | MultiPolygon (Multi)Polygon to get representative point for + tolerance: None | float + Enforce a tolerance by which the point is to be within the polygon Returns ------- Point Representative point for the basin """ + if tolerance is not None: + basin_polygon = basin_polygon.buffer(-tolerance) + point = basin_polygon.centroid # if point not within basin, we return polylabel diff --git a/src/ribasim_nl/ribasim_nl/model.py b/src/ribasim_nl/ribasim_nl/model.py index e076c73..4311a35 100644 --- a/src/ribasim_nl/ribasim_nl/model.py +++ b/src/ribasim_nl/ribasim_nl/model.py @@ -154,6 +154,10 @@ def remove_node(self, node_id: int, remove_edges: bool = False): from_node_id=row.from_node_id, to_node_id=row.to_node_id, remove_disconnected_nodes=False ) + # remove from used node-ids so we can add it again in the same table + if node_id in table._parent._used_node_ids: + table._parent._used_node_ids.node_ids.remove(node_id) + def update_node(self, node_id, node_type, data, node_properties: dict = {}): existing_node_type = self.node_table().df.at[node_id, "node_type"] From 2e681eae7294603f18f3980703ca1260516a79e3 Mon Sep 17 00:00:00 2001 From: rbruijnshkv <118988966+rbruijnshkv@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:30:16 +0200 Subject: [PATCH 06/23] Peilbeheerst sturing (#135) Added the following: - Improved initial water levels and profiles for the bergende basins - Added assign_authority - Improved the DiscreteControl (not finilized yet though) - Added a workflow txt --------- Co-authored-by: Martijn Visser --- .gitignore | 10 +- .../01_parse_crossings.ipynb | 10 +- .../01b_ad_krw_to_peilgebieden.ipynb | 6 +- .../sturing_AmstelGooienVecht.json | 8 +- .../Shortest_path/02_shortest_path_HHSK.ipynb | 2 +- .../Workflow_peilbeheerst_NL_LHM.txt | 15 ++ .../peilbeheerst_model/assign_authorities.py | 140 ++++++++++++++++++ .../peilbeheerst_model/controle_output.py | 9 +- 8 files changed, 173 insertions(+), 27 deletions(-) 
create mode 100644 src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt create mode 100644 src/peilbeheerst_model/peilbeheerst_model/assign_authorities.py diff --git a/.gitignore b/.gitignore index 306aa97..66e02d5 100644 --- a/.gitignore +++ b/.gitignore @@ -20,17 +20,13 @@ dmypy.json # pytest --basetemp src/hydamo/tests/temp/ src/ribasim_nl/tests/temp/ -src/peilbeheerst_model/tests/temp/ src/peilbeheerst_model/*.html src/peilbeheerst_model/*.code-workspace src/peilbeheerst_model/.vscode -src/peilbeheerst_model/*.jpeg -src/peilbeheerst_model/*.gpkg -src/peilbeheerst_model/tests_results -/src/peilbeheerst_model/Output_zdrive -/src/peilbeheerst_model/Rekenend_Model_Test -/src/peilbeheerst_model/vervallen +/src/peilbeheerst_model/Parametrize/ribasim +/src/peilbeheerst_model/Parametrize/fix_FF_HHSK.ipynb +/src/peilbeheerst_model/01_test_parse_crossings.ipynb notebooks/rijkswaterstaat/plots/ diff --git a/src/peilbeheerst_model/01_parse_crossings.ipynb b/src/peilbeheerst_model/01_parse_crossings.ipynb index 026f976..d6c7eb9 100644 --- a/src/peilbeheerst_model/01_parse_crossings.ipynb +++ b/src/peilbeheerst_model/01_parse_crossings.ipynb @@ -36,8 +36,8 @@ " print_df[funcname].append(pd.Series(func_args, name=waterschap))\n", "\n", "for funcname, df in print_df.items():\n", - " display(HTML(f\"

Function {funcname}:

\"))\n", - " display(pd.DataFrame(df))" + " print(HTML(f\"

Function {funcname}:

\"))\n", + " print(pd.DataFrame(df))" ] }, { @@ -164,8 +164,8 @@ "fig1.savefig(\"network_results.jpeg\", bbox_inches=\"tight\")\n", "fig2.savefig(\"reduction_results.jpeg\", bbox_inches=\"tight\")\n", "\n", - "display(pd.DataFrame(reduction_results, index=waterschappen))\n", - "display(pd.DataFrame(network_results, index=waterschappen))" + "print(pd.DataFrame(reduction_results, index=waterschappen))\n", + "print(pd.DataFrame(network_results, index=waterschappen))" ] }, { @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.11.6" } }, "nbformat": 4, diff --git a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb index edb465e..5f1fe7c 100644 --- a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb +++ b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb @@ -34,8 +34,8 @@ " print_df[funcname].append(pd.Series(func_args, name=waterschap))\n", "\n", "for funcname, df in print_df.items():\n", - " display(HTML(f\"

Function {funcname}:

\"))\n", - " display(pd.DataFrame(df))" + " print(HTML(f\"

Function {funcname}:

\"))\n", + " print(pd.DataFrame(df))" ] }, { @@ -96,7 +96,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.11.6" } }, "nbformat": 4, diff --git a/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json b/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json index 58e2e06..1e34cf2 100644 --- a/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json +++ b/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json @@ -58,7 +58,7 @@ "truth_state": ["FF", "FT", "TF", "TT"], "control_state": ["block", "block", "pass", "block"], "flow_rate_block": 0, - "flow_rate_pass": 0.2, + "flow_rate_pass": 0.20, "node_type": "pump" }, @@ -67,7 +67,7 @@ "truth_state": ["FF", "FT", "TF", "TT"], "control_state": ["block", "block", "pass", "pass"], "flow_rate_block": 0, - "flow_rate_pass": 0.2, + "flow_rate_pass": 0.20, "node_type": "pump" }, @@ -76,7 +76,7 @@ "truth_state": ["FF", "FT", "TF", "TT"], "control_state": ["block", "block", "pass", "pass"], "flow_rate_block": 0, - "flow_rate_pass": 0.2, + "flow_rate_pass": 0.20, "node_type": "pump" }, @@ -85,7 +85,7 @@ "truth_state": ["FF", "FT", "TF", "TT"], "control_state": ["pass", "block", "pass", "block"], "flow_rate_block": 0, - "flow_rate_pass": 0.1, + "flow_rate_pass": 0.10, "node_type": "pump" }, diff --git a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb index a1ae0f8..664b77e 100644 --- a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb +++ b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb @@ -632,7 +632,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.11.6" } }, "nbformat": 4, diff --git a/src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt b/src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt new file mode 100644 index 
0000000..3557561 --- /dev/null +++ b/src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt @@ -0,0 +1,15 @@ +Stappenplan ontwikkelen code peilbeheerst NL: +1) Doorloop de preprocessing notebooks. Notebook per waterschap, locatie: peilbeheerst_model/peilbeheerst_model/preprocess_data +2) Doorloop de post processing notebook. Notebook per waterschap, locatie: peilbeheerst_model/peilbeheerst_model/postprocess_data +3) Doorloop de crossings notebook. Eén notebook, locatie: peilbeheerst_model/01_test_parse_crossings.ipynb +4) Doorloop shortest paths notebookS. Notebook per waterschap, locatie: peilbeheerst_model/Shortest_path +5) Doorloop crossings to Ribasim notebook. Eén notebook, alle waterschappen staan onder elkaar, locatie: peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb +6) Doorloop parametrize notebookS. Notebook per waterschap, voor nu alleen nog AGV. Locatie: peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb +

+We hadden oorspronkelijk meer parametrize notebooks, maar omdat zo veel is veranderd heb ik deze nu opgeslagen in onze back up.
+Deze gaan we alleen gebruiken om te kijken of er nog extra handmatige aanpassingen waren.
+Voor de rest volgt het dezelfde workflow als AGV_parametrize.
+
+Tot slot: stap 1 begon met een overzichtelijk notebook per waterschap. Gedurende het proces van 1.5 jaar zijn deze notebooks steeds groter en onoverzichtelijker geworden,
+waarbij niet elke regel meer nodig is. Voor nu ligt er geen prioriteit om dit op te schonen, mede omdat dit een groot risico is dat de data (onbedoeld) wijzigt,
+waardoor de netwerken zullen veranderen en de feedback formulieren niet meer gebruikt kunnen worden.
diff --git a/src/peilbeheerst_model/peilbeheerst_model/assign_authorities.py b/src/peilbeheerst_model/peilbeheerst_model/assign_authorities.py new file mode 100644 index 0000000..44ac5dc --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/assign_authorities.py @@ -0,0 +1,140 @@ +import geopandas as gpd +import numpy as np +import pandas as pd + + +class AssignAuthorities: + def __init__(self, ribasim_model, waterschap, ws_grenzen_path, RWS_grenzen_path, ws_buffer=1025, RWS_buffer=1000): + self.ws_grenzen_path = ws_grenzen_path + self.RWS_grenzen_path = RWS_grenzen_path + + self.ws_buffer = ws_buffer + self.RWS_buffer = RWS_buffer + + self.ribasim_model = ribasim_model + self.waterschap = waterschap + + def assign_authorities(self): + authority_borders = self.retrieve_geodataframe() + ribasim_model = self.embed_authorities_in_model( + ribasim_model=self.ribasim_model, waterschap=self.waterschap, authority_borders=authority_borders + ) + return ribasim_model + + def retrieve_geodataframe(self): + """Main function which calls the other functions.""" + ws_grenzen, RWS_grenzen = self.load_data() + authority_borders = self.clip_and_buffer(ws_grenzen, RWS_grenzen) + authority_borders = self.extent_authority_borders(authority_borders) + + return authority_borders + + def load_data(self): + """Loads and processes the authority areas of the waterschappen and RWS.""" + ws_grenzen = gpd.read_file(self.ws_grenzen_path) + RWS_grenzen = gpd.read_file(self.RWS_grenzen_path) + + # Removing "\n", "waterschap", "Hoogheemraadschap", "van" and spaces and commas + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace(r"\n", "", regex=True) + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace("Waterschap", "", regex=False) + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace("Hoogheemraadschap", "", regex=False) + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace("De S", "S", regex=False) # HDSR + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace("â", "a", regex=False) # WF 
+ ws_grenzen["naam"] = ws_grenzen["naam"].str.replace("van", "", regex=False) + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace(",", "", regex=False) + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace("'", "", regex=False) + ws_grenzen["naam"] = ws_grenzen["naam"].str.replace(" ", "", regex=False) + + ws_grenzen = ws_grenzen.sort_values(by="naam").reset_index(drop=True) + self.ws_grenzen_OG = ws_grenzen.copy() + + # get rid of irrelvant polygons + ws_grenzen = ws_grenzen.explode() + ws_grenzen["area"] = ws_grenzen.area + ws_grenzen = ws_grenzen.loc[ws_grenzen.area > 10000000] # remove some small polygons + ws_grenzen.reset_index(drop=True, inplace=True) + + # add RWS_grenzen. Buffer and dissolve it + RWS_grenzen["geometry"] = RWS_grenzen.buffer(self.RWS_buffer) + RWS_grenzen = RWS_grenzen.dissolve()[["geometry"]] + + return ws_grenzen, RWS_grenzen + + def clip_and_buffer(self, ws_grenzen, RWS_grenzen): + """Clips the waterboard boundaries by removing the RWS areas and applies a buffer to the remaining polygons.""" + # Remove the RWS area in each WS + ws_grenzen_cut_out = gpd.overlay(ws_grenzen, RWS_grenzen, how="symmetric_difference") + ws_grenzen_cut_out.dropna(subset="area", inplace=True) + + # add a name to the RWS area + RWS_grenzen["naam"] = "Rijkswaterstaat" + + # add a buffer to each waterschap. Within this strip an authority will be found. 
+ ws_grenzen_cut_out["geometry"] = ws_grenzen_cut_out.buffer(self.ws_buffer) + + # add the two layers together + authority_borders = pd.concat([ws_grenzen_cut_out, RWS_grenzen]) + authority_borders = authority_borders.reset_index(drop=True) + authority_borders = gpd.GeoDataFrame(authority_borders, geometry="geometry").set_crs(crs="EPSG:28992") + + return authority_borders + + def extent_authority_borders(self, authority_borders): + """Extends the authority borders by combining them with the original waterboard boundaries and dissolving the geometries based on the name.""" + # Add a bit more area by dissolving it with the original gdf + authority_borders = pd.concat([authority_borders, self.ws_grenzen_OG]) + authority_borders = gpd.GeoDataFrame(authority_borders, geometry="geometry").set_crs(crs="EPSG:28992") + authority_borders = authority_borders.dissolve(by="naam", as_index=False) + authority_borders = authority_borders[["naam", "geometry"]] + + return authority_borders + + def embed_authorities_in_model(self, ribasim_model, waterschap, authority_borders): + # create a temp copy of the level boundary df + temp_LB_node = ribasim_model.level_boundary.node.df.copy() + temp_LB_node = temp_LB_node[["node_id", "node_type", "geometry"]] + ribasim_model.level_boundary.static.df = ribasim_model.level_boundary.static.df[["node_id", "level"]] + + # perform a spatial join + joined = gpd.sjoin(temp_LB_node, authority_borders, how="left", op="intersects") + + # #find whether the LevelBoundary flows inward and outward the waterschap + FB_inward = ribasim_model.edge.df.loc[ribasim_model.edge.df.from_node_id.isin(joined.node_id.values)].copy() + FB_outward = ribasim_model.edge.df.loc[ribasim_model.edge.df.to_node_id.isin(joined.node_id.values)].copy() + + # add the current waterschap name in the correct column + FB_inward["meta_to_authority"], FB_outward["meta_from_authority"] = waterschap, waterschap + + temp_LB_node = temp_LB_node.merge( + right=FB_inward[["from_node_id", 
"meta_to_authority"]], + left_on="node_id", + right_on="from_node_id", + how="left", + ) + + temp_LB_node = temp_LB_node.merge( + right=FB_outward[["to_node_id", "meta_from_authority"]], + left_on="node_id", + right_on="to_node_id", + how="left", + ) + + # #replace the current waterschaps name in the joined layer to NaN, and drop those + joined["naam"].replace(to_replace=waterschap, value=np.nan, inplace=True) + joined = joined.dropna(subset="naam").reset_index(drop=True) + + # now fill the meta_from_authority and meta_to_authority columns. As they already contain the correct position of the current waterschap, the remaining 'naam' will be placed correctly as well + temp_LB_node = temp_LB_node.merge(right=joined[["node_id", "naam"]], on="node_id", how="left") + temp_LB_node.meta_from_authority.fillna(temp_LB_node["naam"], inplace=True) + temp_LB_node.meta_to_authority.fillna(temp_LB_node["naam"], inplace=True) + + # only select the relevant columns + temp_LB_node = temp_LB_node[["node_id", "node_type", "geometry", "meta_from_authority", "meta_to_authority"]] + temp_LB_node = temp_LB_node.drop_duplicates(subset="node_id").reset_index(drop=True) + + # place the meta categories to the static table + ribasim_model.level_boundary.static.df = ribasim_model.level_boundary.static.df.merge( + right=temp_LB_node[["node_id", "meta_from_authority", "meta_to_authority"]], on="node_id", how="left" + ).reset_index(drop=True) + + return ribasim_model diff --git a/src/peilbeheerst_model/peilbeheerst_model/controle_output.py b/src/peilbeheerst_model/peilbeheerst_model/controle_output.py index 2dbf817..e2e0b00 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/controle_output.py +++ b/src/peilbeheerst_model/peilbeheerst_model/controle_output.py @@ -2,14 +2,9 @@ import shutil import geopandas as gpd - -# import matplotlib.pyplot as plt -# import numpy as np import pandas as pd import ribasim -# from ribasim import Model - class Control: def __init__(self, work_dir): @@ -176,7 
+171,7 @@ def is_stationary(group): average_last_values = last_24_hours["level"].mean() actual_last_value = group["level"].iloc[-1] - # Calculate the deviation in cm + # Calculate the deviation deviation = abs(actual_last_value - average_last_values) # Determine if it's stationary (deviation <= .11 cm) @@ -208,7 +203,7 @@ def store_data(self, data, output_path): data[str(key)].to_file(output_path + ".gpkg", layer=str(key), driver="GPKG") # copy checks_symbology file from old dir to new dir - output_controle_qlr_path = r"../../../../Data_overig/QGIS_qlr/output_controle.qlr" + output_controle_qlr_path = r"../../../../../Data_overig/QGIS_qlr/output_controle.qlr" shutil.copy(src=output_controle_qlr_path, dst=os.path.join(self.work_dir, "results", "output_controle.qlr")) return From 71697aa0f0fc8d12196f4b38578045330c1efab8 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Tue, 1 Oct 2024 13:04:43 +0200 Subject: [PATCH 07/23] Koppelen LHM 2024.9.0 (#160) Alle code voor de bouw van het LHM 2024.9.0: - Berging-code naar afzonderlijke module] - Afleiding vrij-afwaterende gebieden op het RWS-HWS - Vd Gaast ook toegepast op vrij-afwaterende gebieden op het RWS-HWS (Wilhelminakanaal) --------- Co-authored-by: Maarten Pronk --- notebooks/de_dommel/05_add_berging.py | 202 ++----------------- notebooks/koppelen/01_bergend_gebied.py | 148 ++++++++++++++ notebooks/koppelen/02_koppelen.py | 211 ++++++++++++++++++++ notebooks/koppelen/99_upload_model.py | 8 + open-vscode.bat | 1 + src/ribasim_nl/reset_index.py | 42 ++++ src/ribasim_nl/ribasim_nl/berging.py | 241 +++++++++++++++++++++++ src/ribasim_nl/ribasim_nl/concat.py | 28 ++- src/ribasim_nl/ribasim_nl/model.py | 17 +- src/ribasim_nl/ribasim_nl/reset_index.py | 34 ++-- 10 files changed, 718 insertions(+), 214 deletions(-) create mode 100644 notebooks/koppelen/01_bergend_gebied.py create mode 100644 notebooks/koppelen/02_koppelen.py create mode 100644 notebooks/koppelen/99_upload_model.py create mode 100644 open-vscode.bat 
create mode 100644 src/ribasim_nl/reset_index.py create mode 100644 src/ribasim_nl/ribasim_nl/berging.py diff --git a/notebooks/de_dommel/05_add_berging.py b/notebooks/de_dommel/05_add_berging.py index ea32582..bcd8f43 100644 --- a/notebooks/de_dommel/05_add_berging.py +++ b/notebooks/de_dommel/05_add_berging.py @@ -1,15 +1,11 @@ # %% import geopandas as gpd -import numpy as np -import numpy.typing as npt import pandas as pd -import rasterio -from rasterio.windows import from_bounds -from rasterstats import zonal_stats from ribasim import Node -from ribasim.nodes import basin, tabulated_rating_curve +from ribasim.nodes import basin from ribasim_nl import CloudStorage, Model +from ribasim_nl.berging import add_basin_statistics, get_basin_profile, get_rating_curve from ribasim_nl.geometry import basin_to_point from shapely.geometry import LineString @@ -18,190 +14,19 @@ ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_parameterized", "model.toml") model = Model.read(ribasim_toml) -banden = { - "maaiveld": 1, - "bodemhoogte_primair_winter": 2, - "bodemhoogte_primair_zomer": 3, - "bodemhoogte_secundair_winter": 4, - "bodemhoogte_secundair_zomer": 5, - "bodemhoogte_tertiair_winter": 6, - "bodemhoogte_tertiair_zomer": 7, - "ghg_2010-2019": 8, - "glg_2010-2019": 9, - "opp_primair": 10, - "opp_secundair": 11, - "opp_tertiair": 12, -} - basin_area_df = gpd.read_file( cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg"), engine="pyogrio", fid_as_index=True ) basin_area_df.set_index("node_id", inplace=True) -lhm_rasters = cloud.joinpath("Basisgegevens", "LHM", "4.3", "input", "LHM_data.tif") -ma_raster = cloud.joinpath("Basisgegevens", "VanDerGaast_QH", "spafvoer1.tif") - - -def sample_raster( - raster_file, - df, - band=1, - fill_value: float | None = None, - all_touched=False, - stats="mean", - maaiveld_data: npt.ArrayLike | None = None, -): - with rasterio.open(raster_file) as raster_src: - # read band - data = raster_src.read(band) - - if maaiveld_data 
is not None: - data = maaiveld_data - data - - # fill nodata - if fill_value is not None: - data = np.where(data == raster_src.nodata, fill_value, data) - - affine = raster_src.transform - - return zonal_stats(df, data, affine=affine, stats=stats, nodata=raster_src.nodata, all_touched=all_touched) - - -def get_rating_curve(row, min_level, maaiveld: None | float = None): - flow_rate = np.round([0, row.ma * 0.2, row.ma * 0.33, row.ma / 2, row.ma * 2], decimals=2) - depth = np.round([row.glg + 1, row.glg, row.ghg, row.ghg / 2, 0], decimals=2) - - # set GxG < 0 to 0 - depth[depth < 0] = 0 - - # level relative to maaiveld - if maaiveld is not None: - level = maaiveld - depth - else: - level = row.maaiveld - depth - - # make sure level >= min_level - level[level < min_level] = min_level - - # flow_rate in m3/s - flow_rate = flow_rate / 1000 * row.geometry.area / 86400 - - df = pd.DataFrame({"level": np.round(level, decimals=2), "flow_rate": np.round(flow_rate, decimals=5)}) - df.drop_duplicates("level", keep="first", inplace=True) - df.drop_duplicates("flow_rate", keep="last", inplace=True) - - return tabulated_rating_curve.Static(level=df.level, flow_rate=df.flow_rate) - - -def get_basin_profile(basin_polygon, polygon, max_level, min_level): - with rasterio.open(lhm_rasters) as src: - level = np.array([], dtype=float) - area = np.array([], dtype=float) - - # Get the window and its transform - window = from_bounds(*basin_polygon.bounds, transform=src.transform) - - if (window.width) < 1 or (window.height < 1): - window = from_bounds(*basin_polygon.centroid.buffer(125).bounds, transform=src.transform) - window_transform = src.window_transform(window) - - # Primary water bottom-level - window_data = src.read(3, window=window) - # We don't want hoofdwater / doorgaand water to be in profile - if (polygon is None) | (window_data.size == 0): - mask = ~np.isnan(window_data) - else: - mask = rasterio.features.geometry_mask( - [polygon], window_data.shape, window_transform, 
all_touched=True, invert=True - ) - # Include nodata as False in mask - mask[np.isnan(window_data)] = False - - # add levels - level = np.concat([level, window_data[mask].ravel()]) - - # add areas on same mask - window_data = src.read(10, window=window) - area = np.concat([area, window_data[mask].ravel()]) - - # Secondary water - window_data = src.read(5, window=window) - mask = ~np.isnan(window_data) - level = np.concat([level, window_data[mask].ravel()]) - - window_data = src.read(11, window=window) - area = np.concat([area, window_data[mask].ravel()]) - - # Tertiary water water - window_data = src.read(7, window=window) - mask = ~np.isnan(window_data) - level = np.concat([level, window_data[mask].ravel()]) - - window_data = src.read(12, window=window) - area = np.concat([area, window_data[mask].ravel()]) - - # Make sure area is never larger than polygon-area - area[area > basin_polygon.area] = basin_polygon.area - - # If area is empty, we add min_level at 5% of polygon-area - if area.size == 0: - level = np.append(level, min_level) - area = np.append(area, basin_polygon.area * 0.05) - - # Add extra row with max_level at basin_polygon.area - level = np.append(level, max_level) - area = np.append(area, basin_polygon.area) - - # In pandas for magic - df = pd.DataFrame({"level": np.round(level, decimals=2), "area": np.round(area)}) - df.sort_values(by="level", inplace=True) - df = df.set_index("level").cumsum().reset_index() - df.dropna(inplace=True) - df.drop_duplicates("level", keep="last", inplace=True) +lhm_raster_file = cloud.joinpath("Basisgegevens", "LHM", "4.3", "input", "LHM_data.tif") +ma_raster_file = cloud.joinpath("Basisgegevens", "VanDerGaast_QH", "spafvoer1.tif") - # Return profile - return basin.Profile(area=df.area, level=df.level) +basin_area_df = add_basin_statistics(df=basin_area_df, lhm_raster_file=lhm_raster_file, ma_raster_file=ma_raster_file) -# %% add columns -with rasterio.open(lhm_rasters) as raster_src: - # read band - maaiveld_data = 
raster_src.read(banden["maaiveld"]) - -# sample rasters -ghg = sample_raster( - raster_file=lhm_rasters, - df=basin_area_df, - band=banden["ghg_2010-2019"], - all_touched=True, - maaiveld_data=maaiveld_data, -) -basin_area_df.loc[:, ["ghg"]] = pd.Series(dtype=float) -basin_area_df.loc[:, ["ghg"]] = [i["mean"] for i in ghg] - -glg = sample_raster( - raster_file=lhm_rasters, - df=basin_area_df, - band=banden["glg_2010-2019"], - all_touched=True, - maaiveld_data=maaiveld_data, -) -basin_area_df.loc[:, ["glg"]] = pd.Series(dtype=float) -basin_area_df.loc[:, ["glg"]] = [i["mean"] for i in glg] - -ma = sample_raster(raster_file=ma_raster, df=basin_area_df, all_touched=True, fill_value=37) # 37mm/dag is -basin_area_df.loc[:, ["ma"]] = pd.Series(dtype=float) -basin_area_df.loc[:, ["ma"]] = [i["mean"] for i in ma] - -maaiveld = sample_raster( - raster_file=lhm_rasters, df=basin_area_df, band=banden["maaiveld"], stats="mean min max", all_touched=True -) -basin_area_df.loc[:, ["maaiveld"]] = pd.Series(dtype=float) -basin_area_df.loc[:, ["maaiveld"]] = [i["mean"] if pd.isna(i["mean"]) else i["mean"] for i in maaiveld] -basin_area_df.loc[:, ["maaiveld_max"]] = [i["max"] if pd.isna(i["max"]) else i["max"] for i in maaiveld] -basin_area_df.loc[:, ["maaiveld_min"]] = [i["min"] if pd.isna(i["min"]) else i["min"] for i in maaiveld] # %%update model edge_id = model.edge.df.index.max() + 1 for row in model.basin.node.df.itertuples(): @@ -233,7 +58,13 @@ def get_basin_profile(basin_polygon, polygon, max_level, min_level): min_level = max_level = basin_area_df.at[node_id, "maaiveld_min"] if min_level == max_level: min_level -= 0.1 - basin_profile = get_basin_profile(basin_polygon, polygon, max_level=max_level, min_level=min_level) + basin_profile = get_basin_profile( + basin_polygon=basin_polygon, + polygon=polygon, + max_level=max_level, + min_level=min_level, + lhm_raster_file=lhm_raster_file, + ) data = [ basin_profile, basin.State(level=[basin_profile.df.level.min() + 0.1]), @@ 
-267,6 +98,15 @@ def get_basin_profile(basin_polygon, polygon, max_level, min_level): else: print(f"Geen basin-vlak voor {node_id}") +# %% +df = pd.DataFrame({"node_id": model.basin.node.df.index.to_list()}) +df.index.name = "fid" +df.loc[:, "precipitation"] = 5.787037e-08 +df.loc[:, "potential_evaporation"] = 1.157407e-08 +df.loc[:, "drainage"] = 0 +df.loc[:, "infiltration"] = 0 +model.basin.static.df = df + # %% ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_bergend", "model.toml") diff --git a/notebooks/koppelen/01_bergend_gebied.py b/notebooks/koppelen/01_bergend_gebied.py new file mode 100644 index 0000000..8038159 --- /dev/null +++ b/notebooks/koppelen/01_bergend_gebied.py @@ -0,0 +1,148 @@ +# %% +import geopandas as gpd +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin +from ribasim_nl import CloudStorage, Model +from ribasim_nl.berging import add_basin_statistics, get_basin_profile, get_rating_curve +from ribasim_nl.geodataframe import split_basins +from ribasim_nl.geometry import basin_to_point +from shapely.geometry import LineString, MultiPolygon + +cloud = CloudStorage() + +# %% RWS-HWS +model_path = cloud.joinpath("Rijkswaterstaat", "modellen", "hws") +toml_file = model_path / "hws.toml" +rws_model = Model.read(toml_file) + +# %% DeDommel +model_path = cloud.joinpath("DeDommel", "modellen", "DeDommel") +toml_file = model_path / "model.toml" +model = Model.read(toml_file) +basin_polygon = model.basin.area.df[model.basin.area.df.node_id != 1228].union_all() + +drainage_area = gpd.read_file( + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "areas.gpkg"), layer="drainage_areas" +).union_all() + + +rws_selected_areas_df = rws_model.basin.area.df[rws_model.basin.area.df.intersects(drainage_area.buffer(-10))] +rws_selected_areas = rws_selected_areas_df.union_all() + +poly = ( + rws_model.basin.area.df[rws_model.basin.area.df.intersects(drainage_area.buffer(-10))] + .buffer(0.1) + .union_all() + .buffer(3000) 
+) +poly = poly.difference(basin_polygon).intersection(drainage_area) + +berging_basins_df = gpd.GeoDataFrame(geometry=gpd.GeoSeries(poly.geoms, crs=28992)) + +berging_basins_df = berging_basins_df[berging_basins_df.geom_type == "Polygon"] +berging_basins_df = berging_basins_df[berging_basins_df.intersects(rws_selected_areas)] +berging_basins_df = berging_basins_df[berging_basins_df.area > 50] + +cut_lines_df = gpd.read_file(cloud.joinpath("Rijkswaterstaat", "verwerkt", "couple_user_data.gpkg"), layer="cut_lines") + +berging_basins_df = split_basins(berging_basins_df, cut_lines_df) +berging_basins_df = berging_basins_df[berging_basins_df.intersects(rws_selected_areas)] + + +rws_selected_basins_df = rws_model.basin.node.df[rws_model.basin.node.df.index.isin(rws_selected_areas_df.node_id)] + +berging_basins_df.loc[:, "node_id"] = berging_basins_df.geometry.apply( + lambda x: rws_selected_basins_df.distance(x).idxmin() +) + +berging_basins_df.to_file(cloud.joinpath("Rijkswaterstaat", "verwerkt", "bergende_basins_rws.gpkg")) + + +# %% +rws_model.update_meta_properties({"meta_categorie": "hoofdwater"}) + + +# %% toevoegen bergende gebieden +basin_area_df = gpd.read_file(cloud.joinpath("Rijkswaterstaat", "verwerkt", "bergende_basins_rws.gpkg")) +basin_area_df.set_index("node_id", inplace=True) +lhm_raster_file = cloud.joinpath("Basisgegevens", "LHM", "4.3", "input", "LHM_data.tif") +ma_raster_file = cloud.joinpath("Basisgegevens", "VanDerGaast_QH", "spafvoer1.tif") +basin_area_df = add_basin_statistics(df=basin_area_df, lhm_raster_file=lhm_raster_file, ma_raster_file=ma_raster_file) + +# %% +edge_id = rws_model.edge.df.index.max() + 1 +for row in rws_model.basin.node.df[rws_model.basin.node.df.index.isin(basin_area_df.index)].itertuples(): + # row = next(row for row in model.basin.node.df.itertuples() if row.Index == 1013) + node_id = row.Index + + if node_id in basin_area_df.index: + # basin-polygon + basin_row = basin_area_df.loc[node_id] + basin_polygon = 
basin_area_df.at[node_id, "geometry"] + + # add basin-node + basin_node_id = ( + rws_model.next_node_id + ) # FIXME: can be removed if issue is closed https://github.com/Deltares/Ribasim/issues/1805 + geometry = basin_to_point(basin_polygon=basin_polygon, tolerance=10) + node = Node( + node_id=basin_node_id, + meta_categorie="bergend", + geometry=geometry, + ) + + if node_id in rws_model.basin.area.df.node_id.to_list(): + polygon = rws_model.basin.area.df.set_index("node_id").at[node_id, "geometry"] + else: + polygon = None + + max_level = max_level = basin_area_df.at[node_id, "maaiveld_max"] + min_level = max_level = basin_area_df.at[node_id, "maaiveld_min"] + if min_level == max_level: + min_level -= 0.1 + basin_profile = get_basin_profile( + basin_polygon=basin_polygon, + polygon=polygon, + max_level=max_level, + min_level=min_level, + lhm_raster_file=lhm_raster_file, + ) + data = [ + basin_profile, + basin.State(level=[basin_profile.df.level.min() + 0.1]), + basin.Area(geometry=[MultiPolygon([basin_polygon])]), + ] + basin_node = rws_model.basin.add(node=node, tables=data) + + # get line + line = LineString([geometry, row.geometry]) + + # add tabulated rating curve + tbr_node_id = rws_model.next_node_id + geometry = line.interpolate(0.5, normalized=True) + node = Node( + node_id=tbr_node_id, + meta_categorie="bergend", + geometry=geometry, + ) + if any(pd.isna(getattr(basin_row, i)) for i in ["ghg", "glg", "ma"]): + raise ValueError(f"No valid ghg, glg and/or ma for basin_id {node_id}") + else: + data = [get_rating_curve(row=basin_row, min_level=basin_profile.df.level.min())] + tbr_node = rws_model.tabulated_rating_curve.add(node=node, tables=data) + + # add edges + edge_id += 1 # FIXME: can be removed if issue is closed https://github.com/Deltares/Ribasim/issues/1804 + rws_model.edge.add(basin_node, tbr_node, edge_id=edge_id, meta_categorie="bergend") + edge_id += 1 + rws_model.edge.add(tbr_node, rws_model.basin[node_id], edge_id=edge_id, 
meta_categorie="bergend") + + else: + print(f"Geen basin-vlak voor {node_id}") + +# %% +model_path = cloud.joinpath("Rijkswaterstaat", "modellen", "hws_bergend") +toml_file = model_path / "hws.toml" + +rws_model.write(toml_file) diff --git a/notebooks/koppelen/02_koppelen.py b/notebooks/koppelen/02_koppelen.py new file mode 100644 index 0000000..08568f3 --- /dev/null +++ b/notebooks/koppelen/02_koppelen.py @@ -0,0 +1,211 @@ +# %% +from networkx import NetworkXNoPath +from ribasim_nl import CloudStorage, Model, Network, reset_index +from ribasim_nl.case_conversions import pascal_to_snake_case +from ribasim_nl.concat import concat +from shapely.geometry import LineString + +cloud = CloudStorage() + +# %% update RWS-HWS + +# RWS-HWS +model_path = cloud.joinpath("Rijkswaterstaat", "modellen", "hws_bergend") +toml_file = model_path / "hws.toml" +rws_model = Model.read(toml_file) + +# some fixes +node_id = 8413 +level = rws_model.upstream_profile(node_id).level.min() + 0.1 + +mask = (rws_model.tabulated_rating_curve.static.df.node_id == node_id) & ( + rws_model.tabulated_rating_curve.static.df.level < level +) +rws_model.tabulated_rating_curve.static.df.loc[mask, ["level"]] = level + +# reset index +rws_model = reset_index(rws_model) + +# # write model +rws_model.update_meta_properties(node_properties={"authority": "Rijkswaterstaat"}) +rws_model.write(model_path.with_name("hws_temp") / "hws.toml") + + +# %% update AGV + +# AGV +model_path = cloud.joinpath("AmstelGooienVecht", "modellen", "AmstelGooienVecht_parametrized_2024_8_47") +if not model_path.exists(): + model_url = cloud.joinurl("AmstelGooienVecht", "modellen", "AmstelGooienVecht_parametrized_2024_8_47") + cloud.download_content(model_url) +toml_file = model_path / "ribasim.toml" +# update_database(toml_file) +agv_model = Model.read(toml_file) + +# fix manning issue +agv_model.manning_resistance.static.df = agv_model.manning_resistance.static.df[ + 
agv_model.manning_resistance.static.df.node_id.isin(agv_model.node_table().df.index) +] + +# fix boundary-node issue +agv_model.remove_node(957, remove_edges=False) + +# reset index +agv_model = reset_index(agv_model, node_start=rws_model.next_node_id) + +# # write model +agv_model.update_meta_properties(node_properties={"authority": "AmstelGooienVecht"}) +agv_model.write(model_path.with_name("AmstelGooienVecht_temp") / "agv.toml") + +# %% update De Dommel + +# update De Dommel +model_path = cloud.joinpath("DeDommel", "modellen", "DeDommel") +toml_file = model_path / "model.toml" +# update_database(toml_file) +dommel_model = Model.read(toml_file) + +# set LevelBoundary from-to RWS +mask = dommel_model.level_boundary.static.df.node_id.isin([19, 20, 21, 22, 23, 24, 25, 27]) +dommel_model.level_boundary.static.df.loc[mask, "meta_from_authority"] = "DeDommel" +dommel_model.level_boundary.static.df.loc[mask, "meta_to_authority"] = "Rijkswaterstaat" + +mask = dommel_model.level_boundary.static.df.node_id.isin([17, 18]) +dommel_model.level_boundary.static.df.loc[mask, "meta_from_authority"] = "Rijkswaterstaat" +dommel_model.level_boundary.static.df.loc[mask, "meta_to_authority"] = "DeDommel" + +# reset index +dommel_model = reset_index(dommel_model, node_start=agv_model.next_node_id) + +# # write model +dommel_model.update_meta_properties(node_properties={"authority": "DeDommel"}) + + +dommel_model.write(model_path.with_name("DeDommel_temp") / "de_dommel.toml") + +# %% prepare coupling + +# prepare coupling +netwerk_mask_poly = agv_model.basin.area.df.union_all() + +models = [rws_model, dommel_model] +coupled_model = concat(models) + +network = Network.from_network_gpkg(cloud.joinpath("Rijkswaterstaat", "verwerkt", "netwerk.gpkg")) + +# %% coupling + +# couple boundaries +boundary_node_ids = coupled_model.level_boundary.static.df[ + (coupled_model.level_boundary.static.df.meta_to_authority == "Rijkswaterstaat") + | 
(coupled_model.level_boundary.static.df.meta_from_authority == "Rijkswaterstaat") +].node_id.to_list() + +mask = (coupled_model.node_table().df.meta_authority == "Rijkswaterstaat") & ( + coupled_model.node_table().df.meta_categorie == "hoofdwater" +) + +basin_ids = coupled_model.node_table().df[mask].index.to_list() +basin_areas_df = coupled_model.basin.area.df[coupled_model.basin.area.df.node_id.isin(basin_ids)].set_index("node_id") + +for boundary_node_id in boundary_node_ids: + # boundary_node_id = boundary_node_ids[0] + boundary_node = coupled_model.level_boundary[boundary_node_id] + # get upstream node to couple from + try: + # get basin-id to couple to + to_node_id = basin_areas_df.distance(boundary_node.geometry).idxmin() + to_node = coupled_model.basin[to_node_id] + listen_node_id = to_node_id + + # get to network node + to_network_node = network.nodes.distance(to_node.geometry).idxmin() + + # get node to couple from + from_node_id = coupled_model.upstream_node_id(boundary_node_id) + from_node_type = coupled_model.node_table().df.at[from_node_id, "node_type"] + from_node = getattr(coupled_model, pascal_to_snake_case(from_node_type))[from_node_id] + + # get from network node + link_idx = iter(network.links.distance(from_node.geometry).sort_values().index) + edge_geometry = None + while edge_geometry is None: + idx = next(link_idx) + try: + link_geom = network.links.at[idx, "geometry"] + if link_geom.intersects(netwerk_mask_poly): + continue + projected_point = link_geom.interpolate(link_geom.project(from_node.geometry)) + if network.nodes.distance(projected_point).min() > 10: + from_network_node = network.add_node(projected_point, max_distance=9) + else: + from_network_node = network.nodes.distance(projected_point).idxmin() + edge_geometry = network.get_line(from_network_node, to_network_node) + except NetworkXNoPath: + continue + + except KeyError: + # get basin-id to couple from + from_node_id = basin_areas_df.distance(boundary_node.geometry).idxmin() + 
from_node = coupled_model.basin[from_node_id] + listen_node_id = from_node_id + + # get from network node + from_network_node = network.nodes.distance(from_node.geometry).idxmin() + + # get node to couple to + to_node_id = coupled_model.downstream_node_id(boundary_node_id) + to_node_type = coupled_model.node_table().df.at[to_node_id, "node_type"] + to_node = getattr(coupled_model, pascal_to_snake_case(to_node_type))[to_node_id] + + # get edge geometry + link_idx = iter(network.links.distance(to_node.geometry).sort_values().index) + edge_geometry = None + while edge_geometry is None: + idx = next(link_idx) + try: + link_geom = network.links.at[idx, "geometry"] + if link_geom.intersects(netwerk_mask_poly): + continue + projected_point = link_geom.interpolate(link_geom.project(to_node.geometry)) + if network.nodes.distance(projected_point).min() > 10: + to_network_node = network.add_node(projected_point, max_distance=9) + else: + to_network_node = network.nodes.distance(projected_point).idxmin() + edge_geometry = network.get_line(from_network_node, to_network_node) + except NetworkXNoPath: + continue + + # remove boundary node + coupled_model.remove_node(boundary_node_id, remove_edges=True) + + # update discrete control + mask = coupled_model.discrete_control.variable.df.listen_node_id == boundary_node_id + coupled_model.discrete_control.variable.df.loc[mask, ["listen_node_id"]] = listen_node_id + + # construct edge-geometry + if edge_geometry.boundary.geoms[0].distance(from_node.geometry) > 0.001: + edge_geometry = LineString(tuple(from_node.geometry.coords) + tuple(edge_geometry.coords)) + if edge_geometry.boundary.geoms[1].distance(to_node.geometry) > 0.001: + edge_geometry = LineString(tuple(edge_geometry.coords) + tuple(to_node.geometry.coords)) + + # add edge + edge_id = coupled_model.edge.df.index.max() + 1 + coupled_model.edge.add( + edge_id=edge_id, + from_node=from_node, + to_node=to_node, + geometry=edge_geometry, + meta_from_authority="AmstelGooiEnVecht", 
+ meta_to_authority="Rijkswaterstaat", + ) + + +# %% + +model_path = cloud.joinpath("Rijkswaterstaat", "modellen", "lhm") +toml_file = model_path / "lhm.toml" + +coupled_model.write(toml_file) + +# %% diff --git a/notebooks/koppelen/99_upload_model.py b/notebooks/koppelen/99_upload_model.py new file mode 100644 index 0000000..2d62e90 --- /dev/null +++ b/notebooks/koppelen/99_upload_model.py @@ -0,0 +1,8 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +cloud.upload_model("Rijkswaterstaat", "lhm", include_results=True, include_plots=False) + +# %% diff --git a/open-vscode.bat b/open-vscode.bat new file mode 100644 index 0000000..ae86837 --- /dev/null +++ b/open-vscode.bat @@ -0,0 +1 @@ +pixi run --environment=dev code . | exit diff --git a/src/ribasim_nl/reset_index.py b/src/ribasim_nl/reset_index.py new file mode 100644 index 0000000..cfb78b3 --- /dev/null +++ b/src/ribasim_nl/reset_index.py @@ -0,0 +1,42 @@ +# %% +import pandas as pd +from ribasim import Model +from ribasim_nl.case_conversions import pascal_to_snake_case + + +def reset_index(model: Model, node_start=1): + # only reset if we have to + node_id_min = model.node_table().df.index.min() + node_id_max = model.node_table().df.index.max() + expected_length = node_id_max - node_id_min + 1 + if not ((node_start == node_id_min) and (expected_length == len(model.node_table().df))): + # make sure column node_id == index + node_ids = model.node_table().df.index + + # create a new index for re-indexing all tables + index = pd.Series(data=[i + node_start for i in range(len(node_ids))], index=node_ids).astype("int32") + + # renumber edges + model.edge.df.loc[:, ["from_node_id"]] = model.edge.df["from_node_id"].apply(lambda x: index[x]) + + model.edge.df.loc[:, ["to_node_id"]] = model.edge.df["to_node_id"].apply(lambda x: index[x]) + + # renumber tables + for node_type in model.node_table().df.node_type.unique(): + ribasim_node = getattr(model, pascal_to_snake_case(node_type)) + for attr in 
ribasim_node.model_fields.keys(): + table = getattr(ribasim_node, attr) + try: + if table.df is not None: + if "node_id" in table.df.columns: + table.df.loc[:, "node_id"] = table.df["node_id"].apply(lambda x: index[x]) + table.df.index += 1 + elif table.df.index.name == "node_id": + table.df.index = table.df.reset_index("node_id")["node_id"].apply(lambda x: index.loc[x]) + # table.df.index.name = "node_id" + except KeyError as e: + raise KeyError(f"node_id {e} not in {node_type} / {attr} not in node-table") + return model + + +# %% diff --git a/src/ribasim_nl/ribasim_nl/berging.py b/src/ribasim_nl/ribasim_nl/berging.py new file mode 100644 index 0000000..fe8a3a8 --- /dev/null +++ b/src/ribasim_nl/ribasim_nl/berging.py @@ -0,0 +1,241 @@ +from pathlib import Path + +import numpy as np +import numpy.typing as npt +import pandas as pd +import rasterio +from geopandas import GeoDataFrame +from rasterio.windows import from_bounds +from rasterstats import zonal_stats +from ribasim.nodes import basin, tabulated_rating_curve +from shapely.geometry import Polygon + +BANDEN = { + "maaiveld": 1, + "bodemhoogte_primair_winter": 2, + "bodemhoogte_primair_zomer": 3, + "bodemhoogte_secundair_winter": 4, + "bodemhoogte_secundair_zomer": 5, + "bodemhoogte_tertiair_winter": 6, + "bodemhoogte_tertiair_zomer": 7, + "ghg_2010-2019": 8, + "glg_2010-2019": 9, + "opp_primair": 10, + "opp_secundair": 11, + "opp_tertiair": 12, +} + + +def sample_raster( + raster_file: Path, + df: GeoDataFrame, + band: int = 1, + fill_value: float | None = None, + all_touched: bool = False, + stats: str = "mean", + maaiveld_data: npt.ArrayLike | None = None, +): + """Sample rasters over Polygons + + Args: + raster_file (Path): Raster-file to sample + df (GeoDataFrame): GeoDataFrame with polygons + band (int, optional): Band in raster-file to sample from. Defaults to 1. + fill_value (float | None, optional): Fill-value for nodata-cells. Defaults to None. 
+ all_touched (bool, optional): rasterize all_touched setting. Defaults to False. + stats (str, optional): rasterstats stats setting. Defaults to "mean". + maaiveld_data (npt.ArrayLike | None, optional): If numpy-array in same shape as raster, raster-data will be subtracted from it. Defaults to None. + + Returns + ------- + list[dict]: Rasterstats output + """ + with rasterio.open(raster_file) as raster_src: + # read band + data = raster_src.read(band) + + if maaiveld_data is not None: + data = maaiveld_data - data + + # fill nodata + if fill_value is not None: + data = np.where(data == raster_src.nodata, fill_value, data) + + affine = raster_src.transform + + return zonal_stats(df, data, affine=affine, stats=stats, nodata=raster_src.nodata, all_touched=all_touched) + + +def add_basin_statistics(df: GeoDataFrame, lhm_raster_file: Path, ma_raster_file: Path) -> GeoDataFrame: + """Add Vd Gaast basin-statistics to a Polygon basin GeoDataFrame + + Args: + df (GeoDataFrame): GeoDataFrame with basins + lhm_raster_file (Path): LHM raster-file with layers + ma_raster_file (Path): Specific discharge (maatgevende afvoer) raster + + Returns + ------- + GeoDataFrame: GeoDataFrame with basins ánd statistics + """ + with rasterio.open(lhm_raster_file) as raster_src: + # read band + maaiveld_data = raster_src.read(BANDEN["maaiveld"]) + + # sample rasters + ghg = sample_raster( + raster_file=lhm_raster_file, + df=df, + band=BANDEN["ghg_2010-2019"], + all_touched=True, + maaiveld_data=maaiveld_data, + ) + df.loc[:, ["ghg"]] = pd.Series(dtype=float) + df.loc[:, ["ghg"]] = [i["mean"] for i in ghg] + + glg = sample_raster( + raster_file=lhm_raster_file, + df=df, + band=BANDEN["glg_2010-2019"], + all_touched=True, + maaiveld_data=maaiveld_data, + ) + df.loc[:, ["glg"]] = pd.Series(dtype=float) + df.loc[:, ["glg"]] = [i["mean"] for i in glg] + + ma = sample_raster(raster_file=ma_raster_file, df=df, all_touched=True, fill_value=37) # 37mm/dag is + df.loc[:, ["ma"]] = 
pd.Series(dtype=float) + df.loc[:, ["ma"]] = [i["mean"] for i in ma] + + maaiveld = sample_raster( + raster_file=lhm_raster_file, df=df, band=BANDEN["maaiveld"], stats="mean min max", all_touched=True + ) + df.loc[:, ["maaiveld"]] = pd.Series(dtype=float) + df.loc[:, ["maaiveld"]] = [i["mean"] if pd.isna(i["mean"]) else i["mean"] for i in maaiveld] + df.loc[:, ["maaiveld_max"]] = [i["max"] if pd.isna(i["max"]) else i["max"] for i in maaiveld] + df.loc[:, ["maaiveld_min"]] = [i["min"] if pd.isna(i["min"]) else i["min"] for i in maaiveld] + + return df + + +def get_rating_curve(row, min_level: float, maaiveld: None | float = None) -> tabulated_rating_curve.Static: + """Generate a tabulated_rating_curve.Static object from basin_statistics + + Args: + row (pd.Series): Row in a GeoDataFrame containing basin_statistics + min_level (float): minimal level in rating curve (basin.profile.level.min() of upstream basin) + maaiveld (None | float, optional): surface-level. If none, it should be in row.maaiveld. Defaults to None. 
+ + Returns + ------- + tabulated_rating_curve.Static: Static-table for tabulated_rating_curve node + """ + flow_rate = np.round([0, row.ma * 0.2, row.ma * 0.33, row.ma / 2, row.ma * 2], decimals=2) + depth = np.round([row.glg + 1, row.glg, row.ghg, row.ghg / 2, 0], decimals=2) + + # set GxG < 0 to 0 + depth[depth < 0] = 0 + + # level relative to maaiveld + if maaiveld is not None: + level = maaiveld - depth + else: + level = row.maaiveld - depth + + # make sure level >= min_level + level[level < min_level] = min_level + + # flow_rate in m3/s + flow_rate = flow_rate / 1000 * row.geometry.area / 86400 + + df = pd.DataFrame({"level": np.round(level, decimals=2), "flow_rate": np.round(flow_rate, decimals=5)}) + df.drop_duplicates("level", keep="first", inplace=True) + df.drop_duplicates("flow_rate", keep="last", inplace=True) + + return tabulated_rating_curve.Static(level=df.level, flow_rate=df.flow_rate) + + +def get_basin_profile( + basin_polygon: Polygon, polygon: Polygon, max_level: float, min_level: float, lhm_raster_file: Path +) -> basin.Profile: + """Generate a basin.Static table for a Polygon using LHM rasters + + Args: + basin_polygon (Polygon): Polygon defining the basin + polygon (Polygon): Polygon defining all waters that are to be subtracted in primary waters + max_level (float): minimal level in basin-profile + min_level (float): maximal level in basin-profile + lhm_raster_file (Path): path to lhm-rasters + + Returns + ------- + basin.Profile: basin-profile for basin-node + """ + with rasterio.open(lhm_raster_file) as src: + level = np.array([], dtype=float) + area = np.array([], dtype=float) + + # Get the window and its transform + window = from_bounds(*basin_polygon.bounds, transform=src.transform) + + if (window.width) < 1 or (window.height < 1): + window = from_bounds(*basin_polygon.centroid.buffer(125).bounds, transform=src.transform) + window_transform = src.window_transform(window) + + # Primary water bottom-level + window_data = src.read(3, 
window=window) + + # We don't want hoofdwater / doorgaand water to be in profile + if (polygon is None) | (window_data.size == 0): + mask = ~np.isnan(window_data) + else: + mask = rasterio.features.geometry_mask( + [polygon], window_data.shape, window_transform, all_touched=True, invert=True + ) + # Include nodata as False in mask + mask[np.isnan(window_data)] = False + + # add levels + level = np.concat([level, window_data[mask].ravel()]) + + # add areas on same mask + window_data = src.read(10, window=window) + area = np.concat([area, window_data[mask].ravel()]) + + # Secondary water + window_data = src.read(5, window=window) + mask = ~np.isnan(window_data) + level = np.concat([level, window_data[mask].ravel()]) + + window_data = src.read(11, window=window) + area = np.concat([area, window_data[mask].ravel()]) + + # Tertiary water water + window_data = src.read(7, window=window) + mask = ~np.isnan(window_data) + level = np.concat([level, window_data[mask].ravel()]) + + window_data = src.read(12, window=window) + area = np.concat([area, window_data[mask].ravel()]) + + # Make sure area is never larger than polygon-area + area[area > basin_polygon.area] = basin_polygon.area + + # If area is empty, we add min_level at 5% of polygon-area + if area.size == 0: + level = np.append(level, min_level) + area = np.append(area, basin_polygon.area * 0.05) + + # Add extra row with max_level at basin_polygon.area + level = np.append(level, max_level) + area = np.append(area, basin_polygon.area) + + # In pandas for magic + df = pd.DataFrame({"level": np.round(level, decimals=2), "area": np.round(area)}) + df.sort_values(by="level", inplace=True) + df = df.set_index("level").cumsum().reset_index() + df.dropna(inplace=True) + df.drop_duplicates("level", keep="last", inplace=True) + + # Return profile + return basin.Profile(area=df.area, level=df.level) diff --git a/src/ribasim_nl/ribasim_nl/concat.py b/src/ribasim_nl/ribasim_nl/concat.py index f08a6f9..5a9368e 100644 --- 
a/src/ribasim_nl/ribasim_nl/concat.py +++ b/src/ribasim_nl/ribasim_nl/concat.py @@ -1,5 +1,4 @@ import pandas as pd -import ribasim from ribasim import Model from ribasim_nl import reset_index @@ -21,24 +20,17 @@ def concat(models: list[Model]) -> Model: """ # models will be concatenated to first model. model = reset_index(models[0]) - # determine node_start of next model - node_start = model.node_table().df.node_id.max() + 1 # concat all other models into model for merge_model in models[1:]: - # reset index + # reset index of mergemodel, node_start is max node_id + node_start = model.node_table().df.index.max() + 1 merge_model = reset_index(merge_model, node_start) - # determine node_start of next model - node_start = model.node_table().df.node_id.max() + 1 - - # merge network - # model.network.node = ribasim.Node( - # df=pd.concat([model.network.node.df, merge_model.network.node.df]) - # ) - model.edge = ribasim.EdgeTable( - df=pd.concat([model.edge.df, merge_model.edge.df], ignore_index=True).reset_index(drop=True) - ) + # concat edges + edge_df = pd.concat([model.edge.df, merge_model.edge.df], ignore_index=True) + edge_df.index.name = "edge_id" + model.edge.df = edge_df # merge tables for node_type in model.node_table().df.node_type.unique(): @@ -51,7 +43,13 @@ def concat(models: list[Model]) -> Model: if merge_model_df is not None: if model_df is not None: # make sure we concat both df's into the correct ribasim-object - df = pd.concat([model_df, merge_model_df], ignore_index=True) + if "node_id" in model_df.columns: + df = pd.concat([model_df, merge_model_df], ignore_index=True) + df.index.name = "fid" + elif model_df.index.name == "node_id": + df = pd.concat([model_df, merge_model_df], ignore_index=False) + else: + raise Exception(f"{node_type} / {attr} cannot be merged") else: df = merge_model_df model_node_table.df = df diff --git a/src/ribasim_nl/ribasim_nl/model.py b/src/ribasim_nl/ribasim_nl/model.py index 4311a35..f8a4b51 100644 --- 
a/src/ribasim_nl/ribasim_nl/model.py +++ b/src/ribasim_nl/ribasim_nl/model.py @@ -139,7 +139,7 @@ def remove_node(self, node_id: int, remove_edges: bool = False): for attr in table.model_fields.keys(): df = getattr(table, attr).df if df is not None: - if node_id in df.columns: + if "node_id" in df.columns: getattr(table, attr).df = df[df.node_id != node_id] else: getattr(table, attr).df = df[df.index != node_id] @@ -153,11 +153,24 @@ def remove_node(self, node_id: int, remove_edges: bool = False): self.remove_edge( from_node_id=row.from_node_id, to_node_id=row.to_node_id, remove_disconnected_nodes=False ) - # remove from used node-ids so we can add it again in the same table if node_id in table._parent._used_node_ids: table._parent._used_node_ids.node_ids.remove(node_id) + def update_meta_properties(self, node_properties: dict, node_types: list | None = None): + """Set properties for all, or a selection of, node-types.""" + if node_types is None: + node_types = self.node_table().df.node_type.unique() + + for node_type in node_types: + table = getattr(self, pascal_to_snake_case(node_type)) + node_df = getattr(table, "node").df + if node_df is not None: + for key, value in node_properties.items(): + if not key.startswith("meta_"): + key = f"meta_{key}" + node_df.loc[:, [key]] = value + def update_node(self, node_id, node_type, data, node_properties: dict = {}): existing_node_type = self.node_table().df.at[node_id, "node_type"] diff --git a/src/ribasim_nl/ribasim_nl/reset_index.py b/src/ribasim_nl/ribasim_nl/reset_index.py index 937dae3..8c21426 100644 --- a/src/ribasim_nl/ribasim_nl/reset_index.py +++ b/src/ribasim_nl/ribasim_nl/reset_index.py @@ -7,37 +7,39 @@ def reset_index(model: Model, node_start=1): # only reset if we have to - node_id_min = model.node_table().df.node_id.min() - node_id_max = model.node_table().df.node_id.max() + node_id_min = model.node_table().df.index.min() + node_id_max = model.node_table().df.index.max() expected_length = node_id_max - 
node_id_min + 1 if not ((node_start == node_id_min) and (expected_length == len(model.node_table().df))): # make sure column node_id == index - node_ids = model.node_table().df.node_id + node_ids = model.node_table().df.index # create a new index for re-indexing all tables index = pd.Series(data=[i + node_start for i in range(len(node_ids))], index=node_ids).astype("int32") - # # re-index node_id and fid - # model.network.node.df.index = model.network.node.df["fid"].apply( - # lambda x: index.loc[x] - # ) - # model.network.node.df.index.name = "fid" - # model.network.node.df.drop(columns=["fid"], inplace=True) - # model.network.node.df.loc[:, "node_id"] = model.network.node.df.index - # renumber edges - model.edge.df.loc[:, ["from_node_id"]] = model.edge.df["from_node_id"].apply(lambda x: index.loc[x]) + model.edge.df.loc[:, ["from_node_id"]] = model.edge.df["from_node_id"].apply(lambda x: index[x]) - model.edge.df.loc[:, ["to_node_id"]] = model.edge.df["to_node_id"].apply(lambda x: index.loc[x]) + model.edge.df.loc[:, ["to_node_id"]] = model.edge.df["to_node_id"].apply(lambda x: index[x]) # renumber tables for node_type in model.node_table().df.node_type.unique(): ribasim_node = getattr(model, pascal_to_snake_case(node_type)) for attr in ribasim_node.model_fields.keys(): table = getattr(ribasim_node, attr) - if table.df is not None: - table.df.loc[:, "node_id"] = table.df["node_id"].apply(lambda x: index.loc[x]) - + try: + if table.df is not None: + if "node_id" in table.df.columns: + table.df.loc[:, "node_id"] = table.df["node_id"].apply(lambda x: index[x]) + table.df.index += 1 + if "listen_node_id" in table.df.columns: + table.df.loc[:, "listen_node_id"] = table.df["listen_node_id"].apply(lambda x: index[x]) + if table.df.index.name == "node_id": + table.df.index = table.df.reset_index("node_id")["node_id"].apply(lambda x: index[x]) + + # table.df.index.name = "node_id" + except KeyError as e: + raise KeyError(f"node_id {e} not in {node_type} / {attr} not in 
node-table") return model From 15022e4a4e3c7aafdbaa9e3b8486231f289a19ae Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Tue, 1 Oct 2024 14:34:02 +0200 Subject: [PATCH 08/23] documentatie (#161) @visr for now I use md-files as I cannot preview qmd-files in VSCode. Please advice how to structure png-files (resources) and documentation-files. --------- Co-authored-by: Martijn Visser --- docs/_quarto.yml | 8 +++++++ docs/workflow/level-controlled.qmd | 22 +++++++++++++++++++ docs/workflow/main-watersystem.qmd | 17 ++++++++++++++ .../Workflow_peilbeheerst_NL_LHM.txt | 15 ------------- 4 files changed, 47 insertions(+), 15 deletions(-) create mode 100644 docs/workflow/level-controlled.qmd create mode 100644 docs/workflow/main-watersystem.qmd delete mode 100644 src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt diff --git a/docs/_quarto.yml b/docs/_quarto.yml index d9c23e8..ac88fe7 100644 --- a/docs/_quarto.yml +++ b/docs/_quarto.yml @@ -9,11 +9,19 @@ website: left: - reference/index.qmd - cloudstorage.qmd + - text: "Workflows" + file: workflow/main-watersystem.qmd right: - icon: github href: https://github.com/Deltares/Ribasim-NL aria-label: GitHub + sidebar: + - title: "Workflows" + contents: + - workflow/main-watersystem.qmd + - workflow/level-controlled.qmd + format: html: theme: cosmo diff --git a/docs/workflow/level-controlled.qmd b/docs/workflow/level-controlled.qmd new file mode 100644 index 0000000..e27870c --- /dev/null +++ b/docs/workflow/level-controlled.qmd @@ -0,0 +1,22 @@ +--- +title: "Level controlled" +--- + +Workflow for deriving level controlled (peilbeheerst) regional models. +All code can be found under [`src/peilbeheerst_model`](https://github.com/Deltares/Ribasim-NL/tree/main/src/peilbeheerst_model). +The paths below are relative to this path. + +1. Run the preprocessing notebooks. One notebook per water board, path: `peilbeheerst_model/preprocess_data/` +2. Run the postprocessing notebook. 
One notebook per water board, path: `peilbeheerst_model/postprocess_data/` +3. Run the crossings notebook. One notebook, path: `01_test_parse_crossings.ipynb` +4. Run shortest paths notebooks. One notebook per water board, path: `Shortest_path/` +5. Run crossings to Ribasim notebook. One notebook, all water boards are below each other, path: `02_crossings_to_ribasim_notebook.ipynb` +6. Run parametrize notebooks. One notebook per water board, for now only Amstel, Gooi en Vecht (AGV), path: `Parametrize/AmstelGooienVecht_parametrize.ipynb` + +We originally had more parametrize notebooks, but because so much has changed I have now saved these in our backup. +We will only use these to see if there were any additional manual adjustments. +For the rest, it follows the same workflow as `AmstelGooienVecht_parametrize.ipynb`. + +Finally: step 1 started with a clear notebook per water board. +During the process of 1.5 years, these notebooks have become increasingly larger and more confusing, whereby not every line is needed anymore. +For now, there is no priority to clean this up, partly because this is a major risk that the data will (unintentionally) change, which will change the networks and the feedback forms can no longer be used. diff --git a/docs/workflow/main-watersystem.qmd b/docs/workflow/main-watersystem.qmd new file mode 100644 index 0000000..6a87751 --- /dev/null +++ b/docs/workflow/main-watersystem.qmd @@ -0,0 +1,17 @@ +--- +title: "Main watersystem" +--- + +All files are in [`notebooks/rijkswaterstaat`](https://github.com/Deltares/Ribasim-NL/tree/main/notebooks/rijkswaterstaat) are numbered in order of execution. + +# 1_bathymetrie.py +In this script we merge data from Baseline (version j23_6-v1, non-public) with grids from [bathymetrie-Nederland](https://maps.rijkswaterstaat.nl/geoweb55/index.html?viewer=Bathymetrie_Nederland) to one 5x5m GeoTiff. 
+ +We combine both sources as the Midden Limburgse en Noord Brabantse (MLNB) kanalen are not in Baseline and the IJsselmeer is not completely covered in bathymetrie-Nederland. +For the mask we use Top10NL waterdelen and manually filled a boolean column `baseline`. + +::: {layout-ncol=2} +![Bathymetry mask](https://s3.deltares.nl/ribasim/doc-image/ribasim-nl/bathymetrie_mask.png) + +![Bathymetry](https://s3.deltares.nl/ribasim/doc-image/ribasim-nl/bathymetrie_result.png) +::: diff --git a/src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt b/src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt deleted file mode 100644 index 3557561..0000000 --- a/src/peilbeheerst_model/Workflow_peilbeheerst_NL_LHM.txt +++ /dev/null @@ -1,15 +0,0 @@ -Stappenplan ontwikkelen code peilbeheerst NL: -1) Doorloop de preprocessing notebooks. Notebook per waterschap, locatie: peilbeheerst_model/peilbeheerst_model/preprocess_data -2) Doorloop de post processing notebook. Notebook per waterschap, locatie: peilbeheerst_model/peilbeheerst_model/postprocess_data -3) Doorloop de crossings notebook. Eén notebook, locatie: peilbeheerst_model/01_test_parse_crossings.ipynb -4) Doorloop shortest paths notebookS. Notebook per waterschap, locatie: peilbeheerst_model/Shortest_path -5) Doorloop crossings to Ribasim notebook. Eén notebook, alle waterschappen staan onder elkaar, locatie: peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb -6) Doorloop parametrize notebookS. Notebook per waterschap, voor nu alleen nog AGV. Locatie: peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb - -We hadden oorspronkelijk meer parametrize notebooks, maar omdat zo veel is veranderd heb ik deze nu opgeslagen in onze back up. -Deze gaan we alleen gebruiken om te kijken of er nog extra handmatige aanpassingen waren. -Voor de rest gaat het dezelfde workflow volgens als AGV_parametrize. - -Tot slot: stap 1 begon met een overzichtelijk notebook per waterschap. 
Gedurende het proces van 1.5 jaar zijn deze notebooks steeds groter en onoverzichtelijker geworden, -waarbij niet elke regel meer nodig is. Voor nu ligt er geen prioriteit om dit op te schonen, mede omdat dit een groot risico dat de data (onbedoeld) wijzigt, -waardoor de netwerken zullen veranderen en de feedback formulieren niet meer gebruikt kunnen worden. From 6f39f9594860b832bea787648cc7ca696debefb7 Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Thu, 10 Oct 2024 14:52:30 +0200 Subject: [PATCH 09/23] Upgrade to ribasim v2024.11.0 (#164) https://github.com/Deltares/Ribasim/releases/tag/v2024.11.0 Also updates the rest of our dependencies. --- pixi.lock | 9039 +++++++++++++++++++++++++++-------------------------- pixi.toml | 2 +- 2 files changed, 4577 insertions(+), 4464 deletions(-) diff --git a/pixi.lock b/pixi.lock index 36233b7..2a4e83e 100644 --- a/pixi.lock +++ b/pixi.lock @@ -12,7 +12,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.12-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda @@ -21,26 +21,26 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.29-h03582ad_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-h57bd9a3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.8-h5e77a74_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-hc2627b9_9.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h01636a3_19.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-h191b246_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h29ce20c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h5e77a74_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h4e6ae90_11.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-h02abb05_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.6-h834ce55_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h29c84ef_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h756ea98_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-h3e6eb3e_6.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h9f1560d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.7.0-h10ac4d7_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.11.0-h325d260_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.9.0-hd126650_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h1d30c4a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-ha3822c6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-h0f25b8a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.18.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda @@ -50,7 +50,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.1-heb4867d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -60,7 +60,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 @@ -68,91 +68,92 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.2-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py312h98912ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dart-sass-1.58.3-ha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312h2ec8cdc_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.6-py312h2ec8cdc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-1.41.0-hfc7925d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-dom-0.1.35-hd9586b0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.23.1-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.24.0-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.0-py312h5aa26c2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py312h5aa26c2_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.2-he02047a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-hf7fa9e8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda + 
- conda: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-h77b800c_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.7.2-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda @@ -164,105 +165,107 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-23_linux64_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h364f349_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-23_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.1-default_hb5137d0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.1-default_h9c6a7e4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.0-hbbe4b11_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hbd1db40_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h151b34b_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h151b34b_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hd5b9bfb_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h5e77dd0_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda 
- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.1-h2ff4ddf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.29.0-h438788a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.29.0-h0121fbd_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.65.5-hf5c653b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-23_linux64_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm18-18.1.8-h8b73ec9_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.1-ha7bfdaf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_20_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.27.5-h5b01275_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.11.01-hbbce691_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_11.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h46a8edc_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h66e93f0_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312h854627b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.1-py312h178313f_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 @@ -273,8 +276,8 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h70512c7_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-ha479ceb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda @@ -284,19 +287,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.105-hd34e28f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.2-py312h58c1407_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h690cf93_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.2-py312h1d6d2e6_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.3.241009-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2-ha770c72_0.conda @@ -310,68 +314,68 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.3-py312h12e396e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.4-py312h12e396e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h5aa26c2_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.10.0-py312he8b4914_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312h91f0f75_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.3-py312h91f0f75_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.57-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.4.1-py312h8456570_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.11.01-h77b4e00_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.11.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.4-py312hd18ad41_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.2-h7b32b05_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.9-py312hd18ad41_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.4-h1380c3d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda @@ -387,32 +391,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hedb9d39_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/typst-0.11.0-he8a937b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.2-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.3-py312h7900ff3_0.conda - 
conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda @@ -420,41 +424,39 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h66e93f0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h666cd97_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.42-h4ab18f5_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-hb711507_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-h4bc722e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-h4bc722e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-recordproto-1.14.2-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_6.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl @@ -465,7 +467,7 @@ environments: osx-64: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda @@ -475,26 +477,26 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.29-h2dfa2de_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hb28a666_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hf6f7cdd_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.8-h2f86973_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_9.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-he4b61a0_19.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.5-h915d0f8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hcd1ed9e_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-h2f86973_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_11.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h9d7d61c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.6-hd01826e_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-h8128ea2_11.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.2-h27d4fa7_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-h7a58a96_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h8128ea2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-hef75ebe_6.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-h2e282c2_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.13.0-hf8dbe3c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.8.0-h60298e3_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.12.0-h646f05d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.7.0-hf91904f_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.11.0-h14965f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.9.0-h60298e3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h2259716_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-hdeff353_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h2123174_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.18.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/blosc-1.21.6-h7d75f6d_0.conda @@ -504,7 +506,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312h5861a67_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.1-h44e7173_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -514,7 +516,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.1-py312hf857d28_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-ha105788_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 @@ -522,87 +524,88 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.2-py312h3d0f464_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-0.12.3-py312h41838bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cyrus-sasl-2.1.27-hf9bab2b_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-1.0.0-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/dart-sass-1.58.3-h694c41f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h5861a67_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.6-py312h5861a67_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-1.41.0-h86af993_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-dom-0.1.35-h08cba0f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.23.1-h694c41f_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.24.0-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.3-hac325c4_0.conda - - 
conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.0-py312hcd3578f_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.1-py312hcd3578f_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/fmt-11.0.2-h3c5361c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/fontconfig-2.14.2-h5bb23bf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.54.1-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/freetype-2.12.1-h60636b9_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/freexl-2.0.0-h3ec172f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/geos-3.12.2-hf036a51_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/geotiff-1.7.3-h4bbec01_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hb1e8313_1004.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-64/geos-3.13.0-hac325c4_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/geotiff-1.7.3-h2b6e260_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hac325c4_1005.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/giflib-5.2.2-h10d778d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/glog-0.7.1-h2790a97_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf4-4.2.15-h8138101_7.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/hdf5-1.14.3-nompi_h687a608_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/icu-75.1-h120a0e1_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh57ce528_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - 
conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.17-h6253ea5_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.18-hc62ec3d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/jupyter_core-5.7.2-py312hb401068_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda @@ -614,47 +617,47 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/krb5-1.21.3-h37d8d59_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lcms2-2.16-ha2f27b4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lerc-4.0.0-hb486fe8_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240116.2-cxx17_hf036a51_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240722.0-cxx17_hac325c4_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libaec-1.1.3-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.4-h20e244c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-ha60c65e_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h74c0fbd_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_20_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcrc32c-1.1.2-he49afe7_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.0-h58e7537_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.21-hfdf4475_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.1-h58e7537_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.1-hf95d169_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.22-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libevent-2.1.12-ha90c15b_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.3-hac325c4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h26ecb72_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7ffd8cf_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7ffd8cf_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-hba79287_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7313820_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7313820_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-13_2_0_h97931a8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-13.2.0-h2873a65_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.80.3-h736d271_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.28.0-h721cda5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.28.0-h9e84e37_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.62.2-h384b2fc_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.1-h63bbcf2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.29.0-hade041e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.29.0-h8126ed0_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.65.5-hb88832f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.17-hd75f5a5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.22.5-hdfe23c8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libjpeg-turbo-3.0.0-h0dc2134_1.conda @@ -663,36 +666,38 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libllvm14-14.0.6-hc8e404f_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libnetcdf-4.9.2-nompi_h7334405_114.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libntlm-1.4-h0d85af4_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libopenblas-0.3.27-openmp_h8869122_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.43-h92b6c6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h75a757a_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-4.25.3-h4e4d658_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.09.01-h81f5012_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-he2ba7a0_16.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.44-h4b8f8c9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-h6e894e8_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-5.27.5-h62b0dff_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.11.01-hd530cb8_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-hdfb80b9_17.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.20-hfdf4475_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hdc25a2c_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hc43c327_11.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.1-h4b8f8c9_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.0-hd019ec5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h75589b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.6.0-h603087a_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.21.0-h75589b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.7.0-h583c2ba_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libutf8proc-2.8.0-hb7f2c08_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libwebp-base-1.4.0-h10d778d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h00291cd_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.17.0-hf1f96e2_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libxml2-2.12.7-heaf3512_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.10.1-hc158999_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-18.1.8-h15ab845_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libxslt-1.1.39-h03b04e6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.11.1-h3116616_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.1-h545e0da_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hcc8fd36_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h83408cd_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-c-1.9.4-hf0c8a7f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lzo-2.10-h10d778d_1001.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h0d5aeb7_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.1-py312hca98d7e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h30cc4df_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 @@ -712,19 +717,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.104-h3135457_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.105-h3135457_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py312hc3b515d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.2-py312he4d506f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openjpeg-2.5.2-h7310d3a_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openldap-2.6.8-hcd2896d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h732d5f6_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.2-hd23fc13_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h22b2039_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h52ea4d3_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.2-py312h1171441_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.3-py312h98e817e_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.3.241009-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2-h694c41f_0.conda @@ -738,67 +744,67 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.43.4-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h65860a0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h4b98a8f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.4.1-hf92c781_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h13864a0_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.5.0-h70d2bda_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-h00291cd_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-17.0.0-py312h0be7463_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-core-17.0.0-py312h63b501a_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.3-py312h669792a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.4-py312h669792a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312hab44e94_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312hab44e94_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312hcd3578f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.10.0-py312h8f0a83f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312haf32e09_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.7.0-py312h9673cc4_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.7-h8f8b54e_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/qhull-2020.2-h3c5361c_5.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.57-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.11-py312h4104ae2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/re2-2023.09.01-hb168e87_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.4.1-py312h89b8ddc_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.11.01-h2fb0a26_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.11.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312h669792a_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.4-py312he6c0bb9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.9-py312he6c0bb9_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.2-py312h9d777eb_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h8047845_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h4ff98d2_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/snappy-1.2.1-he1e6707_0.conda @@ -814,50 +820,50 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.0-h313d0e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-h1b23fdf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - 
conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/typst-0.11.0-h11a7dfb_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024b-h00291cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312hc5c4d5f_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/uriparser-0.9.8-h6aefe2f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.2-py312hb553811_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.3-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-hfb503d4_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h0dc2134_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-h197e74d_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h00291cd_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.5-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2 - 
- conda: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hb33e954_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-he4ceba3_6.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-h87427d6_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-hd23fc13_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h7122b0e_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.6-h915ae27_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl @@ -866,9 +872,10 @@ environments: - pypi: src/peilbeheerst_model - pypi: src/ribasim_nl win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda @@ -877,25 +884,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.29-hf1f9119_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hce3b56f_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hb6a8f00_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.8-heca9ddf_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h4d6445f_19.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.5-h184cd82_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hd0ca3c1_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-heca9ddf_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_11.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-hf27581b_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.6-h56e9fbd_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-hf1fc857_11.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.2-hcae1b89_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h76bae87_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-hf1fc857_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h26f7782_6.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h25dd3c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.13.0-haf5610f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.8.0-h148e6f0_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.12.0-hf03c1c4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.7.0-h148e6f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.9.0-h148e6f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-ha77ad07_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-h148e6f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.18.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-h85f69ea_0.conda @@ -905,7 +912,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.1-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.1-h2466b09_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -915,7 +922,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-hc2ea260_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 @@ -923,67 +930,68 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.2-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.7-py312hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.3-py312he70551f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-1.0.0-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/dart-sass-1.58.3-h57928b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.6-py312h275cf98_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/deno-1.41.0-h1f5608b_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/deno-dom-0.1.35-h8b8d39b_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/double-conversion-3.3.0-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/win-64/esbuild-0.23.1-h57928b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.24.0-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.3-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.0-py312hd215820_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.1-py312hd215820_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/fmt-11.0.2-h7f575de_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.2-hbde0cde_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.54.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-hdaf720e_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/freexl-2.0.0-h8276f4a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.12.2-h5a68840_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h232476a_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.13.0-h5a68840_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h496ac4d_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-9.0.0-h2bedf89_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h5557f11_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/hdf5-1.14.3-nompi_h2b43c12_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/icu-75.1-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda @@ -991,7 +999,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2024.2.1-h57928b3_1083.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh7428d3b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda @@ -999,11 +1007,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/jupyter_core-5.7.2-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh5737063_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda @@ -1015,84 +1023,81 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lcms2-2.16-h67d730c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240116.2-cxx17_he0c23c2_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240722.0-cxx17_he0c23c2_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.4-haf234dc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h29daf90_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-23_win64_mkl.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h297d146_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-23_win64_mkl.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-24_win64_mkl.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.1-default_ha5278ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.0-h1ee3ff0_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.21-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.1-h1ee3ff0_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.22-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.3-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h2fd8da2_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.80.3-h7025463_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.28.0-h5e7cea3_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.28.0-he5eb982_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.62.2-h5273850_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.11.1-default_h8125262_1000.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.1.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h042995d_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-hfaa227e_7.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-hfaa227e_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.1-h7025463_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.1.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.29.0-ha00044d_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.29.0-he5eb982_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.65.5-ha20e22e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-hcfcfb64_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-3.0.0-hcfcfb64_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-23_win64_mkl.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h92078aa_114.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.43-h19919ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-4.25.3-h503648d_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.09.01-hf8d8778_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-h6c42fcb_16.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.44-h3ca93ac_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-5.27.5-hcaed137_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.11.01-h4eb7d71_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-hd4c2148_17.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-hab0cb6d_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-h939089a_11.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libssh2-1.11.0-h7dfc565_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-hbe90ef8_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.6.0-hb151862_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.21.0-hbe90ef8_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.7.0-hfc51747_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libutf8proc-2.8.0-h82a8f57_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.4.0-hcfcfb64_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-h013a479_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libwinpthread-12.0.0.r4.gg4f2fc60ca-h57928b3_8.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.17.0-h0e4246c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libxml2-2.12.7-h0f24e4e_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libxslt-1.1.39-h3df6e99_0.conda - - 
conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.10.1-h1d365fa_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.11.1-h25f2845_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h0608a1d_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.4-hcfcfb64_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312h4389bb4_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.1-py312h31fea79_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 @@ -1100,7 +1105,6 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.1.0-h66d3029_694.conda - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda @@ -1113,17 +1117,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py312hcccf92d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.2-py312h49bc9c5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.2-h3d672ee_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_1.conda - 
conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.2-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h784c2ca_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h1c5a4bf_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.2-py312h72972c8_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.3-py312h72972c8_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.3.241009-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2-h57928b3_0.conda @@ -1136,68 +1140,67 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.43.4-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h9415970_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.4.1-hd9569ee_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.5.0-hd9569ee_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/pthreads-win32-2.9.1-hfa6e2cd_3.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-h0e40799_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-17.0.0-py312h7e22eef_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-core-17.0.0-py312h6a9c419_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.3-py312h2615798_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.4-py312h2615798_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.10.0-py312h8705084_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.7.0-py312ha24589b_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.3-py312h2ee7485_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.7-hce54a09_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-306-py312h53d5487_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/qhull-2020.2-hc790b64_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.3-hfb098fa_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.57-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.11-py312he4a2ebf_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.09.01-hd3b24a8_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.4.1-py312h1701b51_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.11.01-hd3b24a8_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.10.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.11.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.4-py312h881003e_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.9-py312h881003e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.2-py312h816cc57_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h3a88d77_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h0c580ee_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/snappy-1.2.1-h23299a8_0.conda @@ -1210,59 +1213,59 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-hc790b64_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.7.0-h91493d7_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.0-h98a567f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h34a6a78_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - 
conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/typst-0.11.0-h975169c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312hd5eb7cc_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/uriparser-0.9.8-h5a68840_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_21.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_21.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_21.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.2-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_22.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_22.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_22.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.3-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyh7428d3b_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312h4389bb4_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-hcd874cb_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2 + - conda: 
https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-h0e40799_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.5-h0e40799_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-ha9f60a1_6.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.6-h0ea2cb4_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl @@ -1282,7 +1285,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.12-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda @@ -1291,26 +1294,26 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.29-h03582ad_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-h57bd9a3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.8-h5e77a74_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-hc2627b9_9.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h01636a3_19.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-h191b246_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h29ce20c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h5e77a74_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h4e6ae90_11.conda + - 
conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-h02abb05_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.6-h834ce55_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h29c84ef_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h756ea98_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-h3e6eb3e_6.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h9f1560d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.7.0-h10ac4d7_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.11.0-h325d260_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.9.0-hd126650_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h1d30c4a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-ha3822c6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-h0f25b8a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.18.5-pyhd8ed1ab_0.conda 
+ - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda @@ -1320,7 +1323,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.1-heb4867d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -1330,7 +1333,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 @@ -1338,91 +1341,92 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.2-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py312h98912ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dart-sass-1.58.3-ha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312h2ec8cdc_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.6-py312h2ec8cdc_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-1.41.0-hfc7925d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-dom-0.1.35-hd9586b0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.23.1-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.24.0-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.0-py312h5aa26c2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py312h5aa26c2_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.2-he02047a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-hf7fa9e8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-h77b800c_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda + - 
conda: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.7.2-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda @@ -1434,105 +1438,107 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-23_linux64_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h364f349_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-23_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_4.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.1-default_hb5137d0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.1-default_h9c6a7e4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.0-hbbe4b11_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hbd1db40_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h151b34b_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h151b34b_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hd5b9bfb_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_7.conda 
+ - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h5e77dd0_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.1-h2ff4ddf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.29.0-h438788a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.29.0-h0121fbd_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.65.5-hf5c653b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-23_linux64_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm18-18.1.8-h8b73ec9_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.1-ha7bfdaf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_20_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.27.5-h5b01275_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.11.01-hbbce691_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_11.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h46a8edc_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h66e93f0_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_0.conda - 
- conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312h854627b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.1-py312h178313f_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 @@ -1543,8 +1549,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h70512c7_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-ha479ceb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda @@ -1554,19 +1560,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.105-hd34e28f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.2-py312h58c1407_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h690cf93_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.2-py312h1d6d2e6_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.3.241009-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2-ha770c72_0.conda @@ 
-1580,67 +1587,67 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.3-py312h12e396e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.4-py312h12e396e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h5aa26c2_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.10.0-py312he8b4914_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312h91f0f75_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.3-py312h91f0f75_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.57-ha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.4.1-py312h8456570_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.11.01-h77b4e00_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.4-py312hd18ad41_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.2-h7b32b05_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.9-py312hd18ad41_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.4-h1380c3d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda @@ -1656,32 +1663,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hedb9d39_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/typst-0.11.0-he8a937b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.2-py312h7900ff3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.3-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda @@ -1689,41 +1696,39 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py312h66e93f0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h666cd97_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.42-h4ab18f5_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-hb711507_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-h4bc722e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-h4bc722e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-recordproto-1.14.2-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_6.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl @@ -1735,7 +1740,7 @@ environments: osx-64: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda @@ -1745,26 +1750,26 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.29-h2dfa2de_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hb28a666_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hf6f7cdd_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.8-h2f86973_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_9.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-he4b61a0_19.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.5-h915d0f8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hcd1ed9e_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-h2f86973_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_11.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h9d7d61c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.6-hd01826e_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-h8128ea2_11.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.2-h27d4fa7_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-h7a58a96_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h8128ea2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-hef75ebe_6.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-h2e282c2_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.13.0-hf8dbe3c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.8.0-h60298e3_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.12.0-h646f05d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.7.0-hf91904f_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.11.0-h14965f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.9.0-h60298e3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h2259716_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-hdeff353_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h2123174_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.18.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/blosc-1.21.6-h7d75f6d_0.conda @@ -1774,7 +1779,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312h5861a67_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.1-h44e7173_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -1784,7 +1789,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.1-py312hf857d28_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-ha105788_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 @@ -1792,87 +1797,88 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.2-py312h3d0f464_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/cytoolz-0.12.3-py312h41838bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cyrus-sasl-2.1.27-hf9bab2b_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-1.0.0-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/dart-sass-1.58.3-h694c41f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h5861a67_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.6-py312h5861a67_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-1.41.0-h86af993_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-dom-0.1.35-h08cba0f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.23.1-h694c41f_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.24.0-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.3-hac325c4_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.0-py312hcd3578f_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.1-py312hcd3578f_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/fmt-11.0.2-h3c5361c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/fontconfig-2.14.2-h5bb23bf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.54.1-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/freetype-2.12.1-h60636b9_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/freexl-2.0.0-h3ec172f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/geos-3.12.2-hf036a51_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/geotiff-1.7.3-h4bbec01_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hb1e8313_1004.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-64/geos-3.13.0-hac325c4_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/geotiff-1.7.3-h2b6e260_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hac325c4_1005.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/giflib-5.2.2-h10d778d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/glog-0.7.1-h2790a97_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf4-4.2.15-h8138101_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf5-1.14.3-nompi_h687a608_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/icu-75.1-h120a0e1_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh57ce528_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.17-h6253ea5_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.18-hc62ec3d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/jsonpointer-3.0.0-py312hb401068_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/jupyter_core-5.7.2-py312hb401068_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda @@ -1884,47 +1890,47 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/krb5-1.21.3-h37d8d59_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lcms2-2.16-ha2f27b4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lerc-4.0.0-hb486fe8_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240116.2-cxx17_hf036a51_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240722.0-cxx17_hac325c4_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libaec-1.1.3-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.4-h20e244c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-ha60c65e_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_13_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h74c0fbd_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_20_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h00291cd_2.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcrc32c-1.1.2-he49afe7_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.0-h58e7537_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.21-hfdf4475_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.1-h58e7537_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.1-hf95d169_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.22-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libevent-2.1.12-ha90c15b_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.3-hac325c4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h26ecb72_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7ffd8cf_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7ffd8cf_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-hba79287_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7313820_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7313820_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_7.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-13_2_0_h97931a8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-13.2.0-h2873a65_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.80.3-h736d271_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.28.0-h721cda5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.28.0-h9e84e37_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.62.2-h384b2fc_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.1-h63bbcf2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.29.0-hade041e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.29.0-h8126ed0_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.65.5-hb88832f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.17-hd75f5a5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.22.5-hdfe23c8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libjpeg-turbo-3.0.0-h0dc2134_1.conda @@ -1933,36 +1939,38 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libllvm14-14.0.6-hc8e404f_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libnetcdf-4.9.2-nompi_h7334405_114.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libntlm-1.4-h0d85af4_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libopenblas-0.3.27-openmp_h8869122_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.43-h92b6c6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h75a757a_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-4.25.3-h4e4d658_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.09.01-h81f5012_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-he2ba7a0_16.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.44-h4b8f8c9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-h6e894e8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-5.27.5-h62b0dff_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.11.01-hd530cb8_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-hdfb80b9_17.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.20-hfdf4475_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hdc25a2c_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hc43c327_11.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.1-h4b8f8c9_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.0-hd019ec5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h75589b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.6.0-h603087a_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.21.0-h75589b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.7.0-h583c2ba_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libutf8proc-2.8.0-hb7f2c08_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libwebp-base-1.4.0-h10d778d_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h00291cd_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.17.0-hf1f96e2_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libxml2-2.12.7-heaf3512_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.10.1-hc158999_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-18.1.8-h15ab845_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libxslt-1.1.39-h03b04e6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.11.1-h3116616_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.1-h545e0da_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hcc8fd36_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h83408cd_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-c-1.9.4-hf0c8a7f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/lzo-2.10-h10d778d_1001.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h0d5aeb7_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.1-py312hca98d7e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h30cc4df_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 @@ -1982,19 +1990,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.104-h3135457_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.105-h3135457_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py312hc3b515d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.2-py312he4d506f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openjpeg-2.5.2-h7310d3a_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openldap-2.6.8-hcd2896d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openpyxl-3.1.5-py312h732d5f6_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.2-hd23fc13_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h22b2039_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h52ea4d3_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda 
- - conda: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.2-py312h1171441_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.3-py312h98e817e_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.3.241009-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.2-h694c41f_0.conda @@ -2008,66 +2017,66 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.43.4-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h65860a0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h4b98a8f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.4.1-hf92c781_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h13864a0_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.5.0-h70d2bda_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-h00291cd_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-17.0.0-py312h0be7463_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-core-17.0.0-py312h63b501a_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.3-py312h669792a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.4-py312h669792a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312hab44e94_1.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312hab44e94_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312hcd3578f_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.10.0-py312h8f0a83f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312haf32e09_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.7.0-py312h9673cc4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.7-h8f8b54e_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hb553811_1.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/qhull-2020.2-h3c5361c_5.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.57-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.11-py312h4104ae2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.09.01-hb168e87_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.4.1-py312h89b8ddc_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.11.01-h2fb0a26_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312h669792a_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.4-py312he6c0bb9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.9-py312he6c0bb9_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.2-py312h9d777eb_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h8047845_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h4ff98d2_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/simplejson-3.19.3-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/snappy-1.2.1-he1e6707_0.conda @@ -2083,50 +2092,50 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.0-h313d0e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-h1b23fdf_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.1-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/typst-0.11.0-h11a7dfb_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tzcode-2024b-h00291cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py312hc5c4d5f_5.conda - 
conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/uriparser-0.9.8-h6aefe2f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.2-py312hb553811_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.3-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/wrapt-1.16.0-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-hfb503d4_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h0dc2134_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-h197e74d_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h00291cd_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.5-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hb33e954_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-he4ceba3_6.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-h87427d6_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-hd23fc13_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py312h7122b0e_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.6-h915ae27_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl @@ -2136,9 +2145,10 @@ environments: - pypi: src/peilbeheerst_model - pypi: src/ribasim_nl win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - 
conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda @@ -2147,25 +2157,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.29-hf1f9119_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hce3b56f_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hb6a8f00_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.8-heca9ddf_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h4d6445f_19.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.5-h184cd82_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hd0ca3c1_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-heca9ddf_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_11.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-hf27581b_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.6-h56e9fbd_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-hf1fc857_11.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.2-hcae1b89_4.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h76bae87_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-hf1fc857_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h26f7782_6.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h25dd3c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.13.0-haf5610f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.8.0-h148e6f0_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.12.0-hf03c1c4_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.7.0-h148e6f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.9.0-h148e6f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-ha77ad07_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-h148e6f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.18.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-h85f69ea_0.conda @@ -2175,7 +2185,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.1-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -2185,7 +2195,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-hc2ea260_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 @@ -2193,67 +2203,68 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_2.conda + - 
conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.2-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.7-py312hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.3-py312he70551f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-1.0.0-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/dart-sass-1.58.3-h57928b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.6-py312h275cf98_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/deno-1.41.0-h1f5608b_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/deno-dom-0.1.35-h8b8d39b_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/double-conversion-3.3.0-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.23.1-h57928b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.24.0-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/expat-2.6.3-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.0-py312hd215820_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.1-py312hd215820_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/fmt-11.0.2-h7f575de_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.2-hbde0cde_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.54.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-hdaf720e_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/freexl-2.0.0-h8276f4a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_2.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geocube-0.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.12.2-h5a68840_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h232476a_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.13.0-h5a68840_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h496ac4d_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-9.0.0-h2bedf89_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h5557f11_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/hdf5-1.14.3-nompi_h2b43c12_105.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/icu-75.1-he0c23c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda @@ -2261,7 +2272,7 @@ environments: - 
conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2024.2.1-h57928b3_1083.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh7428d3b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda @@ -2269,11 +2280,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py312h2e8e312_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/jupyter_core-5.7.2-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh5737063_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda @@ -2285,84 +2296,81 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lcms2-2.16-h67d730c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240116.2-cxx17_he0c23c2_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240722.0-cxx17_he0c23c2_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.4-haf234dc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h29daf90_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-23_win64_mkl.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h297d146_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-23_win64_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-24_win64_mkl.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.1-default_ha5278ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.0-h1ee3ff0_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.21-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.1-h1ee3ff0_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.22-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.3-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h2fd8da2_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.80.3-h7025463_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.28.0-h5e7cea3_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.28.0-he5eb982_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.62.2-h5273850_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.11.1-default_h8125262_1000.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.1.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h042995d_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_7.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-hfaa227e_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-hfaa227e_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.1-h7025463_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.1.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.29.0-ha00044d_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.29.0-he5eb982_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.65.5-ha20e22e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-hcfcfb64_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-3.0.0-hcfcfb64_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-23_win64_mkl.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h92078aa_114.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_13_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.43-h19919ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/libprotobuf-4.25.3-h503648d_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.09.01-hf8d8778_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-h6c42fcb_16.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.44-h3ca93ac_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-5.27.5-hcaed137_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.11.01-h4eb7d71_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-hd4c2148_17.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-hab0cb6d_9.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-h939089a_11.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libssh2-1.11.0-h7dfc565_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-hbe90ef8_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.6.0-hb151862_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.21.0-hbe90ef8_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.7.0-hfc51747_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libutf8proc-2.8.0-h82a8f57_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.4.0-hcfcfb64_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-h013a479_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/libwinpthread-12.0.0.r4.gg4f2fc60ca-h57928b3_8.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.17.0-h0e4246c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libxml2-2.12.7-h0f24e4e_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libxslt-1.1.39-h3df6e99_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.10.1-h1d365fa_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libzip-1.11.1-h25f2845_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.43.0-py312h1f7db74_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-4.3.3-py312h0608a1d_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.4-hcfcfb64_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312h4389bb4_1.conda - 
- conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.1-py312h31fea79_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2 @@ -2370,7 +2378,6 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.1.0-h66d3029_694.conda - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda @@ -2383,17 +2390,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py312hcccf92d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.2-py312h49bc9c5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.2-h3d672ee_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.5-py312he70551f_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.2-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h784c2ca_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h1c5a4bf_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.2-py312h72972c8_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.3-py312h72972c8_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.3.241009-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.2-h57928b3_0.conda @@ -2406,67 +2413,66 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.43.4-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/plum-dispatch-2.5.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h9415970_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.4.1-hd9569ee_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.5.0-hd9569ee_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/pthreads-win32-2.9.1-hfa6e2cd_3.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-h0e40799_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-17.0.0-py312h7e22eef_1.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/pyarrow-core-17.0.0-py312h6a9c419_1_cpu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.3-py312h2615798_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.4-py312h2615798_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.10.0-py312h8705084_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.7.0-py312ha24589b_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.3-py312h2ee7485_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.7-hce54a09_0_cpython.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-306-py312h53d5487_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/qhull-2020.2-hc790b64_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.3-hfb098fa_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.57-h57928b3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.11-py312he4a2ebf_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.09.01-hd3b24a8_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.4.1-py312h1701b51_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.11.01-hd3b24a8_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.4-py312h881003e_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.9-py312h881003e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.2-py312h816cc57_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h3a88d77_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h0c580ee_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/simplejson-3.19.3-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/win-64/snappy-1.2.1-h23299a8_0.conda @@ -2479,59 +2485,59 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/sqlite-3.46.1-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-hc790b64_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.7.0-h91493d7_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.0-h98a567f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h34a6a78_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.1-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/typst-0.11.0-h975169c_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py312hd5eb7cc_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/uriparser-0.9.8-h5a68840_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_21.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_21.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_21.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.2-py312h2e8e312_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_22.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_22.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_22.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.3-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyh7428d3b_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/wrapt-1.16.0-py312h4389bb4_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-hcd874cb_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-h0e40799_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.5-h0e40799_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-ha9f60a1_6.conda - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py312h7606c53_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.6-h0ea2cb4_0.conda - pypi: https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl @@ -2572,6 +2578,26 @@ packages: purls: [] size: 23621 timestamp: 1650670423406 +- 
kind: conda + name: _openmp_mutex + version: '4.5' + build: 2_gnu + build_number: 8 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda + sha256: 1a62cd1f215fe0902e7004089693a78347a30ad687781dfda2289cab000e652d + md5: 37e16618af5c4851a3f3d66dd0e11141 + depends: + - libgomp >=7.5.0 + - libwinpthread >=12.0.0.r2.ggc561118da + constrains: + - openmp_impl 9999 + - msys2-conda-epoch <0.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 49468 + timestamp: 1718213032772 - kind: conda name: affine version: 2.4.0 @@ -2624,28 +2650,29 @@ packages: timestamp: 1716290348421 - kind: conda name: anyio - version: 4.4.0 - build: pyhd8ed1ab_0 + version: 4.6.0 + build: pyhd8ed1ab_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.4.0-pyhd8ed1ab_0.conda - sha256: 84ac9429812495f12939ab4994f2634f7cacd254f6234a0c2c0243daed15a7ee - md5: 1fa97c6e8db1f82c64ff17a5efc4ae8e + url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda + sha256: d05493abca6ac1b0cb15f5d48c3117bddd73cc21e48bfcb460570cfa2ea2f909 + md5: bc13891a047f50728b03595531f7f92e depends: - exceptiongroup >=1.0.2 - idna >=2.8 - - python >=3.8 + - python >=3.9 - sniffio >=1.1 - typing_extensions >=4.1 constrains: - - uvloop >=0.17 - - trio >=0.23 + - uvloop >=0.21.0b1 + - trio >=0.26.1 license: MIT license_family: MIT purls: - pkg:pypi/anyio?source=hash-mapping - size: 104255 - timestamp: 1717693144467 + size: 108445 + timestamp: 1726931347728 - kind: conda name: appdirs version: 1.4.4 @@ -2855,60 +2882,57 @@ packages: timestamp: 1722977241383 - kind: conda name: aws-c-auth - version: 0.7.29 - build: h03582ad_1 - build_number: 1 + version: 0.7.31 + build: h57bd9a3_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.29-h03582ad_1.conda - sha256: 97379dd69b78e5b07a4a776bccb5835aa71f170912385e71ddba5cc93d9085dc - md5: 
6d23dd1c1742112d5fe9f529da7afea9 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-h57bd9a3_0.conda + sha256: 7706d49b8011da81d5dc54e9bad06f67d43edb1ff2aa1dcc3dbc737d53d2a4ef + md5: 83be3b5e072d88b76841cc02c6dd458e depends: - __glibc >=2.17,<3.0.a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 107282 - timestamp: 1725868193209 + size: 107753 + timestamp: 1726544311370 - kind: conda name: aws-c-auth - version: 0.7.29 - build: h2dfa2de_1 - build_number: 1 + version: 0.7.31 + build: hb28a666_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.29-h2dfa2de_1.conda - sha256: a75f56a0d258a837f555c63a5d621e10497e6026c667b919a218038b9ad18647 - md5: e297a166392146d9e3fe3118550b9ff3 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hb28a666_0.conda + sha256: 51bf3047115f21c89e96999ec7a0c46a7684334ffe5a3584547a2e1f9e14ba2a + md5: d14e2cb987740374e14e871456356b76 depends: - __osx >=10.13 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 94284 - timestamp: 1725868368256 + size: 94230 + timestamp: 1726544392579 - kind: conda name: aws-c-auth - version: 0.7.29 - build: hf1f9119_1 - build_number: 1 + version: 0.7.31 + build: hce3b56f_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.29-hf1f9119_1.conda - sha256: 617b3aa9cea4d1107a0809e0bc85ed60a7c6095a4992af9c08e97492cc65fa56 - md5: 8f3aa5632a78884b7f788e9d0fee03f3 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hce3b56f_0.conda + sha256: 
b154854dc8b0c66bf7282da5668352a93f8d36e44936f8adb5bdabe519596e69 + md5: 49f9d09893f4356733ea584c1ef088ce depends: - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - ucrt >=10.0.20348.0 @@ -2917,8 +2941,8 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 102681 - timestamp: 1725868656049 + size: 102819 + timestamp: 1726544858712 - kind: conda name: aws-c-cal version: 0.7.4 @@ -3081,74 +3105,73 @@ packages: - kind: conda name: aws-c-event-stream version: 0.4.3 - build: h235a6dd_1 - build_number: 1 + build: h29ce20c_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda - sha256: 987b3654e7cbb8ead0227c2442a02b6c379d21bb1509a834c423d492a4862706 - md5: c05358e3a231195f7f0b3f592078bb0c + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h29ce20c_2.conda + sha256: ac5e04779811b29fc47e06d6bb9ea6436511216ea2871ad6917c3894174c5fa3 + md5: d533baa7e43239591d5cc0233849c475 depends: - __glibc >=2.17,<3.0.a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 + - aws-checksums >=0.1.20,<0.1.21.0a0 - libgcc >=13 - libstdcxx >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 53989 - timestamp: 1725856758424 + size: 54116 + timestamp: 1726327201288 - kind: conda name: aws-c-event-stream version: 0.4.3 - build: hb6a8f00_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hb6a8f00_1.conda - sha256: 774c763717e58f02333c7f716f126d8bc4d5c0b9659be7be392d7d5cd1aa1d42 - md5: 83e9cebf2e055e5495b9583601ca2d0c + build: hcd1ed9e_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hcd1ed9e_2.conda + sha256: 
f717b5d9baa8206d7069f80cd2f5a2cd99747b3f1e3aed4bea7e392846979d9b + md5: 73bde3fabf8b8f6f2be9cc6f152d0606 depends: + - __osx >=10.13 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - aws-checksums >=0.1.20,<0.1.21.0a0 + - libcxx >=17 license: Apache-2.0 license_family: Apache purls: [] - size: 54527 - timestamp: 1725857386993 + size: 46699 + timestamp: 1726327279325 - kind: conda name: aws-c-event-stream version: 0.4.3 - build: hf6f7cdd_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hf6f7cdd_1.conda - sha256: 3a86d81ece111acc080cab42df6afc5c272c4ee7495d8cda22c90fc54bb0f27e - md5: 6f1d1e8b410d31a11db29d802f21cb64 + build: hd0ca3c1_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hd0ca3c1_2.conda + sha256: be7815f98f210acc1e6cbac1d9a0cb05d6f91fe53c2dd62cab585c4da66359e3 + md5: 93704218ce07e4d961299e170ed430b6 depends: - - __osx >=10.13 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 - - libcxx >=17 + - aws-checksums >=0.1.20,<0.1.21.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 46628 - timestamp: 1725856844781 + size: 54331 + timestamp: 1726327493766 - kind: conda name: aws-c-http - version: 0.8.8 - build: h2f86973_2 - build_number: 2 + version: 0.8.10 + build: h2f86973_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.8-h2f86973_2.conda - sha256: ed4350ada258ea8127a1d6af681e109956c3258aeb3e7e81f9e3d03881e91c5e - md5: a4fa477bc4b23b11f5a8f6b0e3a9ca97 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-h2f86973_0.conda + sha256: dae6a6be9a6fc1c2d6318c62d2b89e20fe75d8df5a4b7766b95be788cfb9516c + md5: 
4160f0e92d2f25532ee52b625556e488 depends: - __osx >=10.13 - aws-c-cal >=0.7.4,<0.7.5.0a0 @@ -3158,17 +3181,16 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 164110 - timestamp: 1725856460863 + size: 164396 + timestamp: 1726469195066 - kind: conda name: aws-c-http - version: 0.8.8 - build: h5e77a74_2 - build_number: 2 + version: 0.8.10 + build: h5e77a74_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.8-h5e77a74_2.conda - sha256: cef335beb17cd299024fae300653ae491c866f7c93287bdf44a9e9b4762b1a54 - md5: b75afaaf2a4ea0e1137ecb35262b8ed4 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h5e77a74_0.conda + sha256: 887af55b895502ef7611ad0dd5e19990385b05348262d6c5a8a22330490b14e7 + md5: 947cd303444ea92a382a10e43bad1a3f depends: - __glibc >=2.17,<3.0.a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 @@ -3179,17 +3201,16 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 197416 - timestamp: 1725856481663 + size: 197233 + timestamp: 1726469181157 - kind: conda name: aws-c-http - version: 0.8.8 - build: heca9ddf_2 - build_number: 2 + version: 0.8.10 + build: heca9ddf_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.8-heca9ddf_2.conda - sha256: 9f151008584d7eb58b1184ac83015a5f8bc8e82cc4fa1e69d660e6260f79f4bc - md5: fcfd389b611656e45860e8e91ac70088 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-heca9ddf_0.conda + sha256: 2d474df981675d8d4bef7b22485c76cbf05df6b65bb2ea3f07363ebc0f6ed34c + md5: efd3dc45770f91dcd4f3a82f50cbea53 depends: - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 @@ -3201,17 +3222,17 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 182376 - timestamp: 1725857088696 + size: 182262 + timestamp: 1726469702580 - kind: conda name: aws-c-io version: 0.14.18 - build: h3831a8d_9 - build_number: 9 + build: h3831a8d_11 + build_number: 11 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_9.conda - sha256: a861e2521e7c70322af6be848f7f9e9b984cfc7b4b6ec436340c08043d652ab2 - md5: 2b8d0c758a7cb3608b87f5aecdb152c7 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_11.conda + sha256: 1ce3c60df424a6e32b69697ce6c126e36cffc065f9dc31a82da5486166120f07 + md5: 297f12ac4c10e0597e760c3751bf4b52 depends: - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 @@ -3221,37 +3242,37 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 160801 - timestamp: 1725843427442 + size: 161125 + timestamp: 1728231479155 - kind: conda name: aws-c-io version: 0.14.18 - build: hc2627b9_9 - build_number: 9 + build: h4e6ae90_11 + build_number: 11 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-hc2627b9_9.conda - sha256: c39d321fb1b0388334f9a3fff1b867de624f455f3f01b7dba10b23bc040e8280 - md5: b1ba84c5cb2e6fe5f5cd1101097a4592 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h4e6ae90_11.conda + sha256: a03b3dfdf221592e17fdf4d4e96ecebfab7052e69bc22adc5eb68b2fc54200de + md5: 21fd3e17dab1b20a0acdbc8b406ee7af depends: - __glibc >=2.17,<3.0.a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - libgcc >=13 - - s2n >=1.5.2,<1.5.3.0a0 + - s2n >=1.5.4,<1.5.5.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 158670 - timestamp: 1725843016336 + size: 159269 + timestamp: 1728231235486 - kind: conda name: aws-c-io version: 0.14.18 - build: hf9a0f1c_9 - build_number: 9 + build: hf9a0f1c_11 + build_number: 11 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_9.conda - sha256: a089493c67ec9e000061920f5a2ef233f59911d474bc77dcec0f4fb9738750ab - md5: c67eee7b35a3fa7a186d65a604a4a01f + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_11.conda + sha256: 
a9683e05898d0622443b2bf0b38945a2599ebbd90b64d2640a21bd514d00ada5 + md5: ccab53307c565057ad2c954effab34ab depends: - __osx >=10.13 - aws-c-cal >=0.7.4,<0.7.5.0a0 @@ -3259,138 +3280,132 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 138424 - timestamp: 1725843066014 + size: 138446 + timestamp: 1728231289821 - kind: conda name: aws-c-mqtt - version: 0.10.4 - build: h01636a3_19 - build_number: 19 + version: 0.10.7 + build: h02abb05_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h01636a3_19.conda - sha256: f188f9127e12b2f90d68c5887f9742838528d8ea64c11e25c90e135cc1465326 - md5: 8ec16206ccaaf74ee5830ffeba436ebc + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-h02abb05_0.conda + sha256: dfc23a658ee659b0bf86545bd76d14710bfb6fb1457824b85e49a0e99b0aaea9 + md5: b442b985952afe5820da96bb976ee006 depends: - __glibc >=2.17,<3.0.a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 163865 - timestamp: 1725892070997 + size: 195051 + timestamp: 1728339436377 - kind: conda name: aws-c-mqtt - version: 0.10.4 - build: h4d6445f_19 - build_number: 19 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.4-h4d6445f_19.conda - sha256: 0dc65ecddda8d26390b2d1cb5db074739c74d47c94f5e0a3927f8431bd0912b5 - md5: edf26447a744762aa7ac8fe678e046ca + version: 0.10.7 + build: h9d7d61c_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h9d7d61c_0.conda + sha256: e17efadc9db5b4397f1a2ce8714bf60a2c5269764dd95000c2a2c97f28e663eb + md5: cfa8c785abedd8caaf6a58703d215c44 depends: + - __osx >=10.13 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - 
vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 157732 - timestamp: 1725892612990 + size: 163926 + timestamp: 1728339489746 - kind: conda name: aws-c-mqtt - version: 0.10.4 - build: he4b61a0_19 - build_number: 19 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.4-he4b61a0_19.conda - sha256: 5d38c7493b28100b954ae1f7420e0876ad0209b99a84600de6d691a220f03e6e - md5: 3cacaf9254c92818cd32de10b3a7bafe + version: 0.10.7 + build: hf27581b_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-hf27581b_0.conda + sha256: 7ba0d682b81f8fdd5b66edf251026a5bfed3b2c51b6d53dbdb5d93985fe451c1 + md5: 7c1bb68151f9b81e1369bbcaa05a574e depends: - - __osx >=10.13 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 138915 - timestamp: 1725892131190 + size: 186759 + timestamp: 1728340065107 - kind: conda name: aws-c-s3 - version: 0.6.5 - build: h184cd82_2 - build_number: 2 + version: 0.6.6 + build: h56e9fbd_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.5-h184cd82_2.conda - sha256: 21ffdc5473041b92a5e581a775988cb59d5b1cbda707b63dc6fc28cefd3b8f25 - md5: df345266c40ab1a2ac3b79be8aa421a2 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.6-h56e9fbd_0.conda + sha256: 15c45a36c07cdbfbb5ec393e6b6d10d15a87df7d2dd87db9fa594b13a3359987 + md5: 0b301304eebf6697381350eb096bd1a5 depends: - - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 + - aws-checksums >=0.1.20,<0.1.21.0a0 - ucrt >=10.0.20348.0 - 
vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 108320 - timestamp: 1725882801691 + size: 108140 + timestamp: 1726722849474 - kind: conda name: aws-c-s3 - version: 0.6.5 - build: h191b246_2 - build_number: 2 + version: 0.6.6 + build: h834ce55_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-h191b246_2.conda - sha256: f43e6a308ae388e4a3968690ae8789e5cfb4d51c96d36a00c832a9067685b1d3 - md5: f8f40355dac7a75313d9c10de91330e7 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.6-h834ce55_0.conda + sha256: b5e921f2bca092eec7355e296292f84a3db6e37802be61c56bf865edc4246532 + md5: dbf33f245023697941d4ff6b996d2b2c depends: - __glibc >=2.17,<3.0.a0 - - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 + - aws-checksums >=0.1.20,<0.1.21.0a0 - libgcc >=13 - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 112780 - timestamp: 1725882305631 + size: 112595 + timestamp: 1726722460857 - kind: conda name: aws-c-s3 - version: 0.6.5 - build: h915d0f8_2 - build_number: 2 + version: 0.6.6 + build: hd01826e_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.5-h915d0f8_2.conda - sha256: b12778ac3bfa5574420472faee2944952c07067f1dc8cca832013edea1982b48 - md5: eb182c006b6eb87d523d51295c2e8050 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.6-hd01826e_0.conda + sha256: 24794cfcaf9d6da28192dda88449dad1e112408a96c51e5a0df6a1925c4e8a57 + md5: 7fa9af757e8376f738eb314518ec282b depends: - __osx >=10.13 - - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - 
aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 + - aws-checksums >=0.1.20,<0.1.21.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 97417 - timestamp: 1725882369510 + size: 97414 + timestamp: 1726722505830 - kind: conda name: aws-c-sdkutils version: 0.1.19 @@ -3447,13 +3462,12 @@ packages: timestamp: 1725837225397 - kind: conda name: aws-checksums - version: 0.1.18 - build: h756ea98_11 - build_number: 11 + version: 0.1.20 + build: h756ea98_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda - sha256: c343bc670bdb52248fc039cbd1cba20fe1d18af81960ab43153d9b55dfb08bc1 - md5: eadcc12bedac44f13223a2909c0e5bcc + url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h756ea98_0.conda + sha256: 4b4543b0ca5528b6ca421f97394d7781a1d7d78b17ac3990d0fbe6a49159a407 + md5: ff7dbb319545f4bd1e5e0f8555cf9e7f depends: - __glibc >=2.17,<3.0.a0 - aws-c-common >=0.9.28,<0.9.29.0a0 @@ -3461,34 +3475,32 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 49962 - timestamp: 1725836852149 + size: 72784 + timestamp: 1726281973900 - kind: conda name: aws-checksums - version: 0.1.18 - build: h8128ea2_11 - build_number: 11 + version: 0.1.20 + build: h8128ea2_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.18-h8128ea2_11.conda - sha256: 37965af8d420d114a5d603d149b7e4ce353b119dffe90ec67c53895cb0e5c402 - md5: 45959482adbad4397bfedcdf262bbb32 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h8128ea2_0.conda + sha256: 1953103f0ead6ffbcb73ddbcec617ce9195010fea838f63b5e8e93b8d4bf4bcb + md5: 1fbab35b839a3d822f1b39680298fd9f depends: - __osx >=10.13 - aws-c-common >=0.9.28,<0.9.29.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 48884 - timestamp: 1725836961245 + size: 70818 + timestamp: 1726281979944 - kind: conda name: aws-checksums - version: 0.1.18 - 
build: hf1fc857_11 - build_number: 11 + version: 0.1.20 + build: hf1fc857_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.18-hf1fc857_11.conda - sha256: e2ef958fe20a98031d39489875e58eece6d5572257f46f5ffdacfed5e41f803e - md5: 5dbcecf902763167d36d9b1da70d4b5d + url: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-hf1fc857_0.conda + sha256: 446710cc7d12beddfe11bfd50a5d2a8f2418b66fb3a0a92a1a9031e041b101e9 + md5: 1b66a8719c94d85fa6658d8f46600f21 depends: - aws-c-common >=0.9.28,<0.9.29.0a0 - ucrt >=10.0.20348.0 @@ -3497,160 +3509,157 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 52209 - timestamp: 1725837389845 + size: 75478 + timestamp: 1726282558694 - kind: conda name: aws-crt-cpp - version: 0.28.2 - build: h27d4fa7_4 - build_number: 4 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.2-h27d4fa7_4.conda - sha256: ccdf92124ea1b0909164226b22932ad39ac80838d537ec960ed26f50f0680c7e - md5: 760a535c189a995ee99474027a87d1bb + version: 0.28.3 + build: h26f7782_6 + build_number: 6 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h26f7782_6.conda + sha256: d17c6a79a872d622656a8d6b54573d230030ef808fc5da2d3bbe5e60518d1c3c + md5: d59a961de1c3039c98f23e8f7532b31a depends: - - __osx >=10.13 - - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-c-mqtt >=0.10.4,<0.10.5.0a0 - - aws-c-s3 >=0.6.5,<0.6.6.0a0 + - aws-c-mqtt >=0.10.7,<0.10.8.0a0 + - aws-c-s3 >=0.6.6,<0.6.7.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - libcxx >=17 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 294389 - timestamp: 1725905017625 + 
size: 255207 + timestamp: 1728390584945 - kind: conda name: aws-crt-cpp - version: 0.28.2 - build: h29c84ef_4 - build_number: 4 + version: 0.28.3 + build: h3e6eb3e_6 + build_number: 6 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h29c84ef_4.conda - sha256: 1404b6fd34e6e0e6587b771d4d63800123e0712792982bc2bbb0d78eeca26a94 - md5: 81674a3f6a59966a9ffaaaf063c8c331 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-h3e6eb3e_6.conda + sha256: bf85c7ad2875771d29db7f65a346b1937fc6b4c7f44283b159e6f00c2dac7a2c + md5: a12a25457b517277e15228889e568daa depends: - __glibc >=2.17,<3.0.a0 - - aws-c-auth >=0.7.29,<0.7.30.0a0 + - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-c-mqtt >=0.10.4,<0.10.5.0a0 - - aws-c-s3 >=0.6.5,<0.6.6.0a0 + - aws-c-mqtt >=0.10.7,<0.10.8.0a0 + - aws-c-s3 >=0.6.6,<0.6.7.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - libgcc >=13 - libstdcxx >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 349192 - timestamp: 1725904799209 + size: 349909 + timestamp: 1728389760881 - kind: conda name: aws-crt-cpp - version: 0.28.2 - build: hcae1b89_4 - build_number: 4 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.2-hcae1b89_4.conda - sha256: 0e15511fb4fc3afa1ad5b08f75a508ea1a5ba85f68e0a7e621666104cda60673 - md5: 83ab71884fd2e42b68d0fae48fbcc2b0 + version: 0.28.3 + build: hef75ebe_6 + build_number: 6 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-hef75ebe_6.conda + sha256: 540af6454373d89636012578c1d35cffb6fcf997ebb242773f975c13cea3d0f8 + md5: 831c884adc08e9cb33671f5ae024da65 depends: - - aws-c-auth >=0.7.29,<0.7.30.0a0 + - __osx >=10.13 + - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - 
aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - - aws-c-http >=0.8.8,<0.8.9.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-c-mqtt >=0.10.4,<0.10.5.0a0 - - aws-c-s3 >=0.6.5,<0.6.6.0a0 + - aws-c-mqtt >=0.10.7,<0.10.8.0a0 + - aws-c-s3 >=0.6.6,<0.6.7.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libcxx >=17 license: Apache-2.0 license_family: Apache purls: [] - size: 254847 - timestamp: 1725905205654 + size: 294267 + timestamp: 1728389843579 - kind: conda name: aws-sdk-cpp - version: 1.11.379 - build: h5a9005d_9 - build_number: 9 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda - sha256: cc2227d97f5e7aed68aeb274a2bec0236af5c20519bde200c8ea7cba114ec978 - md5: 5dc18b385893b7991a3bbeb135ad7c3e + version: 1.11.407 + build: h25dd3c2_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h25dd3c2_0.conda + sha256: 46337ac3bb24a6f8addeef0b642013989cf7efa2de5c1e12e2d7f62c5137549c + md5: b2d39f93aa57382367d6cacd55ec4f32 depends: - - __glibc >=2.17,<3.0.a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - libcurl >=8.9.1,<9.0a0 - - libgcc >=13 - - libstdcxx >=13 + - aws-checksums >=0.1.20,<0.1.21.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.2,<4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 2934257 - timestamp: 1725944617781 + size: 2805201 + timestamp: 1726639233904 - kind: conda name: aws-sdk-cpp - version: 1.11.379 - build: h76bae87_9 - build_number: 9 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.379-h76bae87_9.conda - sha256: 
ae01c01fd59d803dd95c97efa54221b93511c43cd1758b525d5b81bbbb5b8a82 - md5: 41de0c545f05649f3ce3a039b40a09b3 + version: 1.11.407 + build: h2e282c2_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-h2e282c2_0.conda + sha256: c6a88e7882325c56e1f25252a5529bd24650207ab44cb31e976daa9b5d8b1914 + md5: 16c5992e1c374f160128336cdf64e171 depends: + - __osx >=10.13 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 + - aws-checksums >=0.1.20,<0.1.21.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - libcurl >=8.10.0,<9.0a0 + - libcxx >=17 - libzlib >=1.3.1,<2.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 2765550 - timestamp: 1725945456988 + size: 2787167 + timestamp: 1726638546148 - kind: conda name: aws-sdk-cpp - version: 1.11.379 - build: h7a58a96_9 - build_number: 9 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.379-h7a58a96_9.conda - sha256: 65ceb0bfddbeaf3f242ad737e1ed4dba77ba1ebc4ce74a02d7fc276aa2df544d - md5: 9d700e1fee39399bf96abf6e66cdd92d + version: 1.11.407 + build: h9f1560d_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h9f1560d_0.conda + sha256: bc250a3879b88c13e91fc03abdca3867c5a0dd7767da5f364d4460f74d64f286 + md5: 5c3dd49b04db05e0e884de48ff77ae24 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - aws-c-common >=0.9.28,<0.9.29.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - - aws-checksums >=0.1.18,<0.1.19.0a0 - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - libcurl >=8.9.1,<9.0a0 - - libcxx >=17 + - aws-checksums >=0.1.20,<0.1.21.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - libcurl >=8.10.0,<9.0a0 + - libgcc >=13 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: 
[] - size: 2754435 - timestamp: 1725944826345 + size: 2935773 + timestamp: 1726638167995 - kind: conda name: azure-core-cpp version: 1.13.0 @@ -3708,13 +3717,12 @@ packages: timestamp: 1720853326346 - kind: conda name: azure-identity-cpp - version: 1.8.0 - build: h148e6f0_2 - build_number: 2 + version: 1.9.0 + build: h148e6f0_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.8.0-h148e6f0_2.conda - sha256: 1d5c52c0619d4ab1be47cd7958c5c9ecc327b0f5854ae0354b7c9cc60c73afe4 - md5: 83ec332c6f07f9e48c8d5706cceab962 + url: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.9.0-h148e6f0_0.conda + sha256: b049cf07cf19cf16110fc7ba1b7140251977fc5f2145119c68e121b8cebe34cc + md5: efc97c94c6cfeb84ca386d9e33c63c85 depends: - azure-core-cpp >=1.13.0,<1.13.1.0a0 - ucrt >=10.0.20348.0 @@ -3723,17 +3731,16 @@ packages: license: MIT license_family: MIT purls: [] - size: 383395 - timestamp: 1721777916149 + size: 392359 + timestamp: 1722992699567 - kind: conda name: azure-identity-cpp - version: 1.8.0 - build: h60298e3_2 - build_number: 2 + version: 1.9.0 + build: h60298e3_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.8.0-h60298e3_2.conda - sha256: 7bc11d77aab926aff437b6afc089fe937ab03b9f09d679520d4d4a91717b5337 - md5: 29dc05d3b825fd7e2efe0263621c2fdb + url: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.9.0-h60298e3_0.conda + sha256: 1b238982de2532a62a360857e517bbc19037a485fe5b6edaf3dd1e2f7cfe2c40 + md5: 5f61464a9a26242e99615778afc7502e depends: - __osx >=10.13 - azure-core-cpp >=1.13.0,<1.13.1.0a0 @@ -3742,17 +3749,16 @@ packages: license: MIT license_family: MIT purls: [] - size: 148019 - timestamp: 1721777648770 + size: 157809 + timestamp: 1722992283584 - kind: conda name: azure-identity-cpp - version: 1.8.0 - build: hd126650_2 - build_number: 2 + version: 1.9.0 + build: hd126650_0 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda - sha256: f85452eca3ae0e156b1d1a321a1a9f4f58d44ff45236c0d8602ab96aaad3c6ba - md5: 36df3cf05459de5d0a41c77c4329634b + url: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.9.0-hd126650_0.conda + sha256: 0a8e8ce57bc5b22b318e9ab229cb12d29f96245fba7d9618c1b612862b9c84e5 + md5: 470dd41567c68110f0ca2268cd434d8a depends: - __glibc >=2.17,<3.0.a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 @@ -3762,165 +3768,160 @@ packages: license: MIT license_family: MIT purls: [] - size: 199516 - timestamp: 1721777604325 + size: 213265 + timestamp: 1722992189687 - kind: conda name: azure-storage-blobs-cpp - version: 12.12.0 - build: h646f05d_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.12.0-h646f05d_0.conda - sha256: 7153e4ba0112246fc93b2b6631c17b1c2c4f7878f2c4a25426e38a78a0b4cd4c - md5: d3f572c8ebf9ad5cdc07558b3b2c27ce + version: 12.13.0 + build: h1d30c4a_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h1d30c4a_0.conda + sha256: 376b1daf96f066c0bca09b61faee182355b21802bcaa6ad7f0494c65888a0b5b + md5: 378dd9c6347c112d301421a00d6446a9 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 - - libcxx >=16 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 + - libgcc >=13 + - libstdcxx >=13 license: MIT license_family: MIT purls: [] - size: 423224 - timestamp: 1721865021128 + size: 550550 + timestamp: 1727385969150 - kind: conda name: azure-storage-blobs-cpp - version: 12.12.0 - build: hd2e3451_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda - sha256: 69a0f5c2a08a1a40524b343060debb8d92295e2cc5805c3db56dad7a41246a93 - md5: 61f1c193452f0daa582f39634627ea33 + version: 12.13.0 + build: h2259716_0 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h2259716_0.conda + sha256: 3ac5ace301e04a45ca2b27cc662d218b3dec07d68267af8fd36d617fcd59f833 + md5: 1146aa05e75cf261f615a34ef724d9d5 depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 + - libcxx >=17 license: MIT license_family: MIT purls: [] - size: 523120 - timestamp: 1721865032339 + size: 444010 + timestamp: 1727386034095 - kind: conda name: azure-storage-blobs-cpp - version: 12.12.0 - build: hf03c1c4_0 + version: 12.13.0 + build: ha77ad07_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.12.0-hf03c1c4_0.conda - sha256: 27a8b5df83d650129fb7ed4f73272f08bd92f72c2622e96c5145048ee442a39f - md5: 093769d5e96a6940cf10086af031dbca + url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-ha77ad07_0.conda + sha256: 17391511d8229d147632f3da0d55ceabcf2e09842b4e3ac110f7290729cec35a + md5: a0a698cd3f459efd08da14cf532e7c0a depends: - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 967558 - timestamp: 1721865277797 + size: 969856 + timestamp: 1727386384989 - kind: conda name: azure-storage-common-cpp - version: 12.7.0 - build: h10ac4d7_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.7.0-h10ac4d7_1.conda - sha256: 1030fa54497a73eb78c509d451f25701e2e781dc182e7647f55719f1e1f9bee8 - md5: ab6d507ad16dbe2157920451d662e4a1 + version: 12.8.0 + build: h148e6f0_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-h148e6f0_0.conda + 
sha256: 1446059dbeed5940466837afd0f028f34aaca93d4a02b62b1a49fc48016fd8aa + md5: b3f5dabec8fb63d8f3f72a7d95bcc583 depends: - - __glibc >=2.17,<3.0.a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - libxml2 >=2.12.7,<3.0a0 - - openssl >=3.3.1,<4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 143039 - timestamp: 1721832724803 + size: 241069 + timestamp: 1727272027753 - kind: conda name: azure-storage-common-cpp - version: 12.7.0 - build: h148e6f0_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.7.0-h148e6f0_1.conda - sha256: e65871ff5c3f6e19d21f9e98318de93fbed2ead70f1e6f379246c5e696bd87a7 - md5: 9802dfd947dba7777ffcb25078c59c2d + version: 12.8.0 + build: ha3822c6_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-ha3822c6_0.conda + sha256: 3bb614675fcae708eeb344fe1a21040007ab4efb1de2bd0f1c6171e4c23eb092 + md5: d4a0f1f34459947781a6975beb505fa9 depends: + - __glibc >=2.17,<3.0.a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libgcc >=13 + - libstdcxx >=13 + - libxml2 >=2.12.7,<3.0a0 + - openssl >=3.3.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 239921 - timestamp: 1721833165139 + size: 149394 + timestamp: 1727271583048 - kind: conda name: azure-storage-common-cpp - version: 12.7.0 - build: hf91904f_1 - build_number: 1 + version: 12.8.0 + build: hdeff353_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.7.0-hf91904f_1.conda - sha256: 333599899b25ef22e2a2e1c09bab75203da9f47612e1ff2a40fddae76feb08eb - md5: 99146c62f4b2a74c3026f128f42e35bf + url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-hdeff353_0.conda + sha256: 
12751ee03a7620cfa453184782d303e7763fc498d1710ff74e15e6ed74244164 + md5: 4f5d3bf98b8c5eef9c65d3d8017ce47a depends: - __osx >=10.13 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - libcxx >=16 + - libcxx >=17 - libxml2 >=2.12.7,<3.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 124472 - timestamp: 1721832914540 + size: 126050 + timestamp: 1727271812046 - kind: conda name: azure-storage-files-datalake-cpp - version: 12.11.0 - build: h14965f0_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.11.0-h14965f0_1.conda - sha256: 73ada329714a4893238737d77be147b1e1412f80fa94191c3f686eae0bee459c - md5: d99c3c0c72b11340028cac4689835c0c + version: 12.12.0 + build: h0f25b8a_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-h0f25b8a_0.conda + sha256: 697ce350677465c2532c988ad6a4754c471ffa587c1b6e8833c2e73c0a697300 + md5: 7477b3a3f09927fbc47cd69e3f3430ea depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 - - libcxx >=16 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 + - libgcc >=13 + - libstdcxx >=13 license: MIT license_family: MIT purls: [] - size: 192115 - timestamp: 1721925157499 + size: 286941 + timestamp: 1727554386014 - kind: conda name: azure-storage-files-datalake-cpp - version: 12.11.0 - build: h325d260_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.11.0-h325d260_1.conda - sha256: 1726fa324bb402e52d63227d6cb3f849957cd6841f8cb8aed58bb0c81203befb - md5: 11d926d1f4a75a1b03d1c053ca20424b + version: 12.12.0 + build: h2123174_0 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h2123174_0.conda + sha256: ca5a432e5a68757cc6bf64c25d3bb45398e7a68b9ed1c346f266dbcc8c9d3af7 + md5: 8efd606971a03ceacf25feac63968834 depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 + - libcxx >=17 license: MIT license_family: MIT purls: [] - size: 274492 - timestamp: 1721925100762 + size: 200677 + timestamp: 1727554487274 - kind: conda name: babel version: 2.14.0 @@ -3942,21 +3943,21 @@ packages: timestamp: 1702422720584 - kind: conda name: beartype - version: 0.18.5 + version: 0.19.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/beartype-0.18.5-pyhd8ed1ab_0.conda - sha256: b68b7db7b849d999c5cc97b831e06a490c3dcb64aad84367c0969139a7a8f844 - md5: 28786996506a2f2dd7819b5f3705f4e4 + url: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda + sha256: b99118bdb935028cb854e107d8aa007522fb02831be64c27f8c29f616e0cd9c0 + md5: 4d35362664efdc0c9de9b685f94ecae3 depends: - python >=3.8 license: MIT license_family: MIT purls: - pkg:pypi/beartype?source=hash-mapping - size: 766954 - timestamp: 1713735111213 + size: 855068 + timestamp: 1727502610463 - kind: conda name: beautifulsoup4 version: 4.12.3 @@ -4349,12 +4350,12 @@ packages: timestamp: 1720974491916 - kind: conda name: c-ares - version: 1.33.1 + version: 1.34.1 build: h2466b09_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.33.1-h2466b09_0.conda - sha256: 2cc89d816e39c7a8afdb0bdb46c3c8558ab3e174397be3300112159758736919 - md5: 8415a266788fd249f5e137487db796b0 + url: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.1-h2466b09_0.conda + 
sha256: 8fb691558b774f09e279bc10a401e39507f634d5762993b2fb4e5c49b772f933 + md5: beed99d68e1513f7af98a3ea8c8bd9ab depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -4362,39 +4363,39 @@ packages: license: MIT license_family: MIT purls: [] - size: 166630 - timestamp: 1724438651925 + size: 193256 + timestamp: 1728535073722 - kind: conda name: c-ares - version: 1.33.1 + version: 1.34.1 build: h44e7173_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda - sha256: 98b0ac09472e6737fc4685147d1755028cc650d428369cbe3cb74ab38b327095 - md5: b31a2de5edfddb308dda802eab2956dc + url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.1-h44e7173_0.conda + sha256: 004fea4112ce5a862271265e908a762843390e1870dacfd1a9a38c9aad902e9c + md5: 611618b0b3949f947da65c96ff9c51fb depends: - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 163203 - timestamp: 1724438157472 + size: 182470 + timestamp: 1728534760896 - kind: conda name: c-ares - version: 1.33.1 + version: 1.34.1 build: heb4867d_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda - sha256: 2cb24f613eaf2850b1a08f28f967b10d8bd44ef623efa0154dc45eb718776be6 - md5: 0d3c60291342c0c025db231353376dfb + url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.1-heb4867d_0.conda + sha256: d7e50b2ce3ef01dfbb11e8f50411b4be91b92c94cd10a83c843f1f2e53832e04 + md5: db792eada25e970c46642f624b029fd7 depends: - __glibc >=2.28,<3.0.a0 - - libgcc-ng >=13 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 182796 - timestamp: 1724438109690 + size: 204891 + timestamp: 1728534705052 - kind: conda name: ca-certificates version: 2024.8.30 @@ -4553,7 +4554,7 @@ packages: - libglib >=2.80.3,<3.0a0 - libpng >=1.6.43,<1.7.0a0 - libstdcxx-ng >=12 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 - libzlib >=1.3.1,<2.0a0 - pixman >=0.43.2,<1.0a0 - xorg-libice >=1.1.1,<2.0a0 @@ -4720,21 +4721,21 @@ packages: 
timestamp: 1718906379286 - kind: conda name: charset-normalizer - version: 3.3.2 + version: 3.4.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda - sha256: 20cae47d31fdd58d99c4d2e65fbdcefa0b0de0c84e455ba9d6356a4bdbc4b5b9 - md5: 7f4a9e3fcff3f6356ae99244a014da6a + url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda + sha256: 1873ac45ea61f95750cb0b4e5e675d1c5b3def937e80c7eebb19297f76810be8 + md5: a374efa97290b8799046df7c5ca17164 depends: - python >=3.7 license: MIT license_family: MIT purls: - pkg:pypi/charset-normalizer?source=hash-mapping - size: 46597 - timestamp: 1698833765762 + size: 47314 + timestamp: 1728479405343 - kind: conda name: click version: 8.1.7 @@ -4889,12 +4890,12 @@ packages: - kind: conda name: contourpy version: 1.3.0 - build: py312h68727a3_1 - build_number: 1 + build: py312h68727a3_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_1.conda - sha256: e459bc2d05fabfffcf9bf1f3725e36a5ef64ae7f0b5af312eeaed2e0519e22c8 - md5: 6b9f9141c247bdd61a2d6d37e0a8b530 + url: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda + sha256: 777ff055866872f45f0f8d2ad17a0c42f3c63463f8c1da9d75fa5b1652940b50 + md5: ff28f374b31937c048107521c814791e depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -4906,17 +4907,17 @@ packages: license_family: BSD purls: - pkg:pypi/contourpy?source=hash-mapping - size: 272322 - timestamp: 1725378526351 + size: 276004 + timestamp: 1727293728397 - kind: conda name: contourpy version: 1.3.0 - build: py312hc5c4d5f_1 - build_number: 1 + build: py312hc5c4d5f_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_1.conda - sha256: 9ccd546024da0a3d2695e21d780f75745b3427047dc073ce88ef545f5bf2e3df - md5: 68996da74a346963430ace9984d627b4 + url: 
https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_2.conda + sha256: fd7277e1085c5dad3e6b7196e253807df2bd6fc6e34f8e376a71b9a7bd05b82b + md5: 272979666cda74f84d9c158b378237b6 depends: - __osx >=10.13 - libcxx >=17 @@ -4927,17 +4928,17 @@ packages: license_family: BSD purls: - pkg:pypi/contourpy?source=hash-mapping - size: 256566 - timestamp: 1725378501399 + size: 260301 + timestamp: 1727293933046 - kind: conda name: contourpy version: 1.3.0 - build: py312hd5eb7cc_1 - build_number: 1 + build: py312hd5eb7cc_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_1.conda - sha256: 557d32fd30108c4fd44fba60621e30519c1fcf6a361cfd8bef1f3e3eac51eb99 - md5: 1e7201bef33d1d3da3bf95bf0c273879 + url: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_2.conda + sha256: d151bce7aa260d9b930a02b0e559f3f2655648c113dbaeb4ac4149db73a1f8d6 + md5: 1792f9a7cf336580fb0137691a00e7a9 depends: - numpy >=1.23 - python >=3.12,<3.13.0a0 @@ -4949,71 +4950,81 @@ packages: license_family: BSD purls: - pkg:pypi/contourpy?source=hash-mapping - size: 214543 - timestamp: 1725378879919 + size: 216606 + timestamp: 1727294254434 - kind: conda name: coverage - version: 7.6.1 - build: py312h4389bb4_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.1-py312h4389bb4_1.conda - sha256: cca6398754855d8ffa8412b58a4439f0f183013ae730962ef9cc8150525f3871 - md5: 49b4e0600c84e7d53aae4c042f1e2e4a + version: 7.6.2 + build: py312h3d0f464_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.2-py312h3d0f464_0.conda + sha256: 892c9a4dc830d4ab1b4a4abd10c079338a0a80d81adf0940e45ad2762db8315a + md5: 8e87296799e87fa5de5e82f4473cf764 depends: + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tomli - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: Apache-2.0 - license_family: APACHE 
purls: - pkg:pypi/coverage?source=hash-mapping - size: 388697 - timestamp: 1724954338520 + size: 362094 + timestamp: 1728527536188 - kind: conda name: coverage - version: 7.6.1 - build: py312h66e93f0_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h66e93f0_1.conda - sha256: 1ad422ed302e3630b26e23238bd1d047674b153c4f0a99e7773faa591aa7eab9 - md5: 5dc6e358ee0af388564bd0eba635cf9e + version: 7.6.2 + build: py312h4389bb4_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.2-py312h4389bb4_0.conda + sha256: 442f865d0c18243a7b8ffeb8eece05929a5345b8464185008d651683dab97a40 + md5: b0e1bc38ada7fdf2dd7343bb9f294cd0 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tomli + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 - license_family: APACHE purls: - pkg:pypi/coverage?source=hash-mapping - size: 363627 - timestamp: 1724953903049 + size: 387590 + timestamp: 1728528255329 - kind: conda name: coverage - version: 7.6.1 - build: py312hb553811_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.1-py312hb553811_1.conda - sha256: fd0f5c84ef943618b378592e74010831a7962127e2759ea75437117ad3f00eee - md5: 49f066bb9337fd34a4c9c09f576ce136 + version: 7.6.2 + build: py312h66e93f0_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.2-py312h66e93f0_0.conda + sha256: a48fd12d3a2b021998fff3588cbd811386c64528111d5d284a73dfc9a552495b + md5: fa85b4b778217fbeb88425985f001497 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tomli license: Apache-2.0 - license_family: APACHE purls: - pkg:pypi/coverage?source=hash-mapping - size: 362574 - timestamp: 1724954071768 + size: 363534 + timestamp: 1728527478116 +- kind: conda + name: cpython + 
version: 3.12.7 + build: py312hd8ed1ab_0 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.7-py312hd8ed1ab_0.conda + sha256: 9bbd08c83cc9c3142755b96dc5f3e0f0370d7afdb773c8285359b31e7ce96f0a + md5: f0d1309310498284ab13c9fd73db4781 + depends: + - python 3.12.7.* + - python_abi * *_cp312 + license: Python-2.0 + purls: [] + size: 44632 + timestamp: 1728057282977 - kind: conda name: cycler version: 0.12.1 @@ -5032,33 +5043,78 @@ packages: size: 13458 timestamp: 1696677888423 - kind: conda - name: cytoolz - version: 0.12.3 - build: py312h41838bb_0 + name: cyrus-sasl + version: 2.1.27 + build: h54b06d7_7 + build_number: 7 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda + sha256: d2ea5e52da745c4249e1a818095a28f9c57bd4df22cbfc645352defa468e86c2 + md5: dce22f70b4e5a407ce88f2be046f4ceb + depends: + - krb5 >=1.21.1,<1.22.0a0 + - libgcc-ng >=12 + - libntlm + - libstdcxx-ng >=12 + - openssl >=3.1.1,<4.0a0 + license: BSD-3-Clause-Attribution + license_family: BSD + purls: [] + size: 219527 + timestamp: 1690061203707 +- kind: conda + name: cyrus-sasl + version: 2.1.27 + build: hf9bab2b_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-0.12.3-py312h41838bb_0.conda - sha256: 79df086b364cb7b80c367d6b19fc2fad7879e89d811bb7e9c25ddf02e94d7959 - md5: b8e837f24cd0f2a2478ecf2299ce55a8 + url: https://conda.anaconda.org/conda-forge/osx-64/cyrus-sasl-2.1.27-hf9bab2b_7.conda + sha256: d4be27d58beb762f9392a35053404d5129e1ec41d24a9a7b465b4d84de2e5819 + md5: b3a8aa48d3d5e1bfb31ee3bde1f2c544 + depends: + - krb5 >=1.21.1,<1.22.0a0 + - libcxx >=15.0.7 + - libntlm + - openssl >=3.1.1,<4.0a0 + license: BSD-3-Clause-Attribution + license_family: BSD + purls: [] + size: 209174 + timestamp: 1690061476074 +- kind: conda + name: cytoolz + version: 1.0.0 + build: py312h4389bb4_1 + build_number: 1 + subdir: win-64 + url: 
https://conda.anaconda.org/conda-forge/win-64/cytoolz-1.0.0-py312h4389bb4_1.conda + sha256: 640df307a193ad3b34e97a8c274428247ad613289d612de7f8d775a6087a5aef + md5: d957d8852eb4b4a4f8b6af1e8a0cc044 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - toolz >=0.10.0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/cytoolz?source=hash-mapping - size: 342008 - timestamp: 1706897335369 + size: 316445 + timestamp: 1728335602220 - kind: conda name: cytoolz - version: 0.12.3 - build: py312h98912ed_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py312h98912ed_0.conda - sha256: 8fae95ac24fb9dc05ee0284c929869cb97467319460bafac52956c79b1fee3f0 - md5: a4fbffb84a54767266c69e3699078a00 - depends: - - libgcc-ng >=12 + version: 1.0.0 + build: py312h66e93f0_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda + sha256: 73ad7e01d83734a1418be3a225c14d7840ad93f21cecb13d75a3ca5ea9a464c8 + md5: a921e2fe122e7f38417b9b17c7a13343 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - toolz >=0.10.0 @@ -5066,29 +5122,28 @@ packages: license_family: BSD purls: - pkg:pypi/cytoolz?source=hash-mapping - size: 393874 - timestamp: 1706897203319 + size: 395285 + timestamp: 1728335183361 - kind: conda name: cytoolz - version: 0.12.3 - build: py312he70551f_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.3-py312he70551f_0.conda - sha256: 9b3a63be81910d653e2ef7ceba12f22c92e22ca2fd5cb37e72aa1bef8e6d8fc3 - md5: bf01d5b4e152592d0483cc10df040ad8 + version: 1.0.0 + build: py312hb553811_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-1.0.0-py312hb553811_1.conda + sha256: 137cdf341c8c5588ce937f312c4ae0c5361dbdaadcce3f87337f793bdf030032 + md5: 
88916f1dbad108ee2db9b9f4df2b6b36 depends: + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - toolz >=0.10.0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/cytoolz?source=hash-mapping - size: 315464 - timestamp: 1706897770551 + size: 343379 + timestamp: 1728335175039 - kind: conda name: dart-sass version: 1.58.3 @@ -5133,22 +5188,22 @@ packages: timestamp: 1683598364427 - kind: conda name: dask - version: 2024.8.2 + version: 2024.9.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda - sha256: 6afd548c338bb418d9645081cbe49b93ffa70f0fb74d9c3c4ed7defd910178ea - md5: 3adbad9b363bd0163ef2ac59f095cc13 + url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda + sha256: 0007c996c91891df3a3fe3d6b8265f0b602396989d4ce87cd78d88fd94dfac48 + md5: f4a81fe958755b2db083566a6a2da06f depends: - - bokeh >=2.4.2,!=3.0.* + - bokeh >=3.1.0 - cytoolz >=0.11.0 - - dask-core >=2024.8.2,<2024.8.3.0a0 + - dask-core >=2024.9.1,<2024.9.2.0a0 - dask-expr >=1.1,<1.2 - - distributed >=2024.8.2,<2024.8.3.0a0 + - distributed >=2024.9.1,<2024.9.2.0a0 - jinja2 >=2.10.3 - lz4 >=4.3.2 - - numpy >=1.21 + - numpy >=1.24 - pandas >=2.0 - pyarrow >=7.0 - pyarrow-hotfix @@ -5159,16 +5214,16 @@ packages: license_family: BSD purls: [] size: 7417 - timestamp: 1725064395582 + timestamp: 1727494165691 - kind: conda name: dask-core - version: 2024.8.2 + version: 2024.9.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda - sha256: 1c1b86b719262a7d557327f5c1e363e7039a4078c42270a19dcd9af42fe1404f - md5: 8e7524a2fb561506260db789806c7ee9 + url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda + sha256: 08d01f45f711fcb093e04a491825f9dd0f4129e6432587f5f84a3cbd10a4030d + md5: 
0bcf33226f8dbe7e2d6acefb99a7323f depends: - click >=8.1 - cloudpickle >=3.0.0 @@ -5183,19 +5238,19 @@ packages: license_family: BSD purls: - pkg:pypi/dask?source=hash-mapping - size: 888258 - timestamp: 1725051212771 + size: 896858 + timestamp: 1727485758122 - kind: conda name: dask-expr - version: 1.1.13 + version: 1.1.15 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda - sha256: e1b570064d24e85278c53c87e4e361e60fb01a156ce026eac310ff9dcbd85111 - md5: b77166a6032a2b8e52b3fee90d62ea4d + url: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda + sha256: 7ff9c79fc369de2b9da433108cd02ed5a99045493b366f6ce7acaa2dd097a6b0 + md5: 865cd3fdeffd42a9682f3bb992e828e8 depends: - - dask-core 2024.8.2 + - dask-core 2024.9.1 - pandas >=2 - pyarrow - python >=3.10 @@ -5203,8 +5258,8 @@ packages: license_family: BSD purls: - pkg:pypi/dask-expr?source=hash-mapping - size: 185183 - timestamp: 1725321008333 + size: 185216 + timestamp: 1727490062118 - kind: conda name: dbus version: 1.13.6 @@ -5225,13 +5280,12 @@ packages: timestamp: 1640112124844 - kind: conda name: debugpy - version: 1.8.5 - build: py312h275cf98_1 - build_number: 1 + version: 1.8.6 + build: py312h275cf98_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.5-py312h275cf98_1.conda - sha256: 44403893fe8d5c2b3416d8377fce34f04b3cb8f4dc79e19161b024cde6814df3 - md5: 51b54280745ac5573ed0937c71c0e514 + url: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.6-py312h275cf98_0.conda + sha256: 048929ec428f8f288a60d273a1da897735249f63a4026bc6a8fe3bc4f245a797 + md5: c02f5ecd381057f141fb33c5aa319610 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -5242,17 +5296,16 @@ packages: license_family: MIT purls: - pkg:pypi/debugpy?source=hash-mapping - size: 3174333 - timestamp: 1725269561740 + size: 3576437 + timestamp: 1727241342607 - kind: conda name: debugpy - 
version: 1.8.5 - build: py312h2ec8cdc_1 - build_number: 1 + version: 1.8.6 + build: py312h2ec8cdc_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.5-py312h2ec8cdc_1.conda - sha256: 63b027e5605955d22d6bd491316c81876363bce36c7b5fea006a664337d77686 - md5: f89b813bd9fe5ae6e3b7d17e17801f68 + url: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.6-py312h2ec8cdc_0.conda + sha256: 3c75b1358046c8b4d9ccd6df509f07859de6554a781a5eb46c90f295c499afab + md5: f5ca5a690ff9100b7a05d26f77d88156 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -5263,17 +5316,16 @@ packages: license_family: MIT purls: - pkg:pypi/debugpy?source=hash-mapping - size: 2085616 - timestamp: 1725269284102 + size: 2642177 + timestamp: 1727240850721 - kind: conda name: debugpy - version: 1.8.5 - build: py312h5861a67_1 - build_number: 1 + version: 1.8.6 + build: py312h5861a67_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.5-py312h5861a67_1.conda - sha256: 5ad4567872a0aa9f0dace65d9f6b4315f452df7d238bec6a4482c5527b7762fc - md5: 87fcafa1ac8e06b6acd5ee95632adf87 + url: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.6-py312h5861a67_0.conda + sha256: 71ee52f2b8676767ad781c2038873b06300b851729ca2fc3c4b8a5e211f229b6 + md5: 5dcf9133d68237c59931ab728e6ccadc depends: - __osx >=10.13 - libcxx >=17 @@ -5283,8 +5335,8 @@ packages: license_family: MIT purls: - pkg:pypi/debugpy?source=hash-mapping - size: 2059098 - timestamp: 1725269547461 + size: 2526304 + timestamp: 1727240828899 - kind: conda name: decorator version: 5.1.1 @@ -5418,35 +5470,34 @@ packages: timestamp: 1682953401972 - kind: conda name: distlib - version: 0.3.8 + version: 0.3.9 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda - sha256: 3ff11acdd5cc2f80227682966916e878e45ced94f59c402efb94911a5774e84e - md5: db16c66b759a64dc5183d69cc3745a52 + url: 
https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + sha256: 300b2e714f59403df0560174f5ef6c19db8b4a3b74a7244862cf771f07dee8fb + md5: fe521c1608280cc2803ebd26dc252212 depends: - python 2.7|>=3.6 license: Apache-2.0 - license_family: APACHE purls: - pkg:pypi/distlib?source=hash-mapping - size: 274915 - timestamp: 1702383349284 + size: 276214 + timestamp: 1728557312342 - kind: conda name: distributed - version: 2024.8.2 + version: 2024.9.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda - sha256: b0eb013bc9fa6d88424ec7bf2a9fb82448d2457edacccc798dea5ef760a6ef01 - md5: 44d22b5d98a219a4c35cafe9bf3b9ce2 + url: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda + sha256: d4d934d3b5c73d8ccadd7c1b37cfba99b096403f33d4cf0085108daeed46e3c9 + md5: a9f1c72da2654a8ae07a33ed3975d328 depends: - click >=8.0 - cloudpickle >=3.0.0 - cytoolz >=0.11.2 - - dask-core >=2024.8.2,<2024.8.3.0a0 + - dask-core >=2024.9.1,<2024.9.2.0a0 - jinja2 >=2.10.3 - locket >=1.0.0 - msgpack-python >=1.0.2 @@ -5466,8 +5517,8 @@ packages: license_family: BSD purls: - pkg:pypi/distributed?source=hash-mapping - size: 798375 - timestamp: 1725058359740 + size: 801109 + timestamp: 1727490025224 - kind: conda name: double-conversion version: 3.3.0 @@ -5520,45 +5571,45 @@ packages: timestamp: 1643888357950 - kind: conda name: esbuild - version: 0.23.1 + version: 0.24.0 build: h57928b3_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.23.1-h57928b3_0.conda - sha256: 987d31dda73564de7a38b51b12f42f9ba23ac356a60614729b46bda3b53acc50 - md5: 20023013c76ef62e689fa34b8e51199a + url: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.24.0-h57928b3_0.conda + sha256: 8a41f8838a024fc3d132eb8d5cfc8a2f73d812e516e4f99c8a1bfd082ef931cb + md5: 333e68de31627c2a782cd24cc19ed385 license: MIT license_family: MIT purls: [] - size: 3708244 - 
timestamp: 1724050551357 + size: 3707406 + timestamp: 1727001690308 - kind: conda name: esbuild - version: 0.23.1 + version: 0.24.0 build: h694c41f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.23.1-h694c41f_0.conda - sha256: 0828b2997ce2671ff1c55145037574e941a02569a2dbbc475eb68696779a19f4 - md5: 1c23b8c781689627dd6ded1ec90d1ea3 + url: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.24.0-h694c41f_0.conda + sha256: 06e40416f9934f507b68f7c9b3c59d04e427ab4995044e62fd20e05ee3c32214 + md5: ed8675524721c7bfb14bb3d92e7ef9cb constrains: - __osx>=10.12 license: MIT license_family: MIT purls: [] - size: 3701817 - timestamp: 1724050164186 + size: 3689609 + timestamp: 1727000975335 - kind: conda name: esbuild - version: 0.23.1 + version: 0.24.0 build: ha770c72_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.23.1-ha770c72_0.conda - sha256: 8d62023e9066d40ac99dec68b8ced2e123f4ffeb4288a4c716c1d397cdf115ed - md5: 7e6d8acdf44ce73a7fd2f36e83f4f2ec + url: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.24.0-ha770c72_0.conda + sha256: 38bfefeafd4dfad798615740e424efacb2122b81eacfce736bc6a90b05ccd8a5 + md5: b2cdfd1d3c62519b913d71f200e7f9ca license: MIT license_family: MIT purls: [] - size: 3634821 - timestamp: 1724050078722 + size: 3636196 + timestamp: 1727000941821 - kind: conda name: et_xmlfile version: 1.1.0 @@ -5679,29 +5730,29 @@ packages: timestamp: 1725569133557 - kind: conda name: filelock - version: 3.16.0 + version: 3.16.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda - sha256: f55c9af3d92a363fa9e4f164038db85a028befb65d56df0b2cb34911eba8a37a - md5: ec288789b07ae3be555046e099798a56 + url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + sha256: 1da766da9dba05091af87977922fe60dc7464091a9ccffb3765d403189d39be4 + md5: 916f8ec5dd4128cd5f207a3c4c07b2c6 depends: - python 
>=3.7 license: Unlicense purls: - pkg:pypi/filelock?source=hash-mapping - size: 17402 - timestamp: 1725740654220 + size: 17357 + timestamp: 1726613593584 - kind: conda name: fiona - version: 1.10.0 + version: 1.10.1 build: py312h5aa26c2_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.0-py312h5aa26c2_1.conda - sha256: 8ecbc114d35f1bea4f90107d82773c217c1d1111db51c82af5c215a28f4ae927 - md5: dcff3350fc47354d59e05760db669e7f + url: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py312h5aa26c2_1.conda + sha256: 36f7f5d852a28c2a18d1cd31c02bb3ec0bfdb6b3ee1c3a0118827110e5457d18 + md5: 4a30f4277a1894928a7057d0e14c1c95 depends: - __glibc >=2.17,<3.0.a0 - attrs >=19.2.0 @@ -5718,19 +5769,20 @@ packages: - python_abi 3.12.* *_cp312 - shapely license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/fiona?source=hash-mapping - size: 1175161 - timestamp: 1726075405189 + size: 1171712 + timestamp: 1726664552119 - kind: conda name: fiona - version: 1.10.0 + version: 1.10.1 build: py312hcd3578f_1 build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.0-py312hcd3578f_1.conda - sha256: 9525e0bbc4e3ead0cde0475bd9cf43e5a03ee5a7dd0b1f6f90b300bfd36fd3f5 - md5: 36140048bbc860971d60f6ecc20d2596 + url: https://conda.anaconda.org/conda-forge/osx-64/fiona-1.10.1-py312hcd3578f_1.conda + sha256: a5de22e8ac633b79f6e8a14344cbc56dd1aca04b106f4e2c16e5552e8186e394 + md5: f4fa2a4c6c894923c24363e76d173ca3 depends: - __osx >=10.13 - attrs >=19.2.0 @@ -5746,19 +5798,20 @@ packages: - python_abi 3.12.* *_cp312 - shapely license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/fiona?source=hash-mapping - size: 1024770 - timestamp: 1726075810757 + size: 1025449 + timestamp: 1726664744089 - kind: conda name: fiona - version: 1.10.0 + version: 1.10.1 build: py312hd215820_1 build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.0-py312hd215820_1.conda - 
sha256: adc71ebb2b15cf59e4174af2469109a084a7c8e1d41fa98a7aa5bd4c00d77896 - md5: 8b7f1e27c03057f3da5cc200f36051bc + url: https://conda.anaconda.org/conda-forge/win-64/fiona-1.10.1-py312hd215820_1.conda + sha256: 0f91c93cff199371f665fe0807d4c871dd6e6534c8c9b5ede6897c867b055c23 + md5: 1b03775deb65180f956a7c1b0e6edc5f depends: - attrs >=19.2.0 - click >=8.0,<9.dev0 @@ -5775,10 +5828,11 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/fiona?source=hash-mapping - size: 975399 - timestamp: 1726075777728 + size: 976102 + timestamp: 1726664869512 - kind: conda name: fmt version: 11.0.2 @@ -5896,18 +5950,18 @@ packages: - kind: conda name: font-ttf-ubuntu version: '0.83' - build: h77eed37_2 - build_number: 2 + build: h77eed37_3 + build_number: 3 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda - sha256: c940f6e969143e13a3a9660abb3c7e7e23b8319efb29dbdd5dee0b9939236e13 - md5: cbbe59391138ea5ad3658c76912e147f + url: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda + sha256: 2821ec1dc454bd8b9a31d0ed22a7ce22422c0aef163c59f49dfdf915d0f0ca14 + md5: 49023d73832ef61042f6a237cb2687e7 license: LicenseRef-Ubuntu-Font-Licence-Version-1.0 license_family: Other purls: [] - size: 1622566 - timestamp: 1714483134319 + size: 1620504 + timestamp: 1727511233259 - kind: conda name: fontconfig version: 2.14.2 @@ -6002,13 +6056,12 @@ packages: timestamp: 1566932280397 - kind: conda name: fonttools - version: 4.53.1 - build: py312h4389bb4_1 - build_number: 1 + version: 4.54.1 + build: py312h4389bb4_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.53.1-py312h4389bb4_1.conda - sha256: 0f0300e6c6053d9f16844af06c60650c59e20f1e4b1a944bdf0b23377fb2f616 - md5: 6663e0f27c39d39504617e4fe4da3bf6 + url: https://conda.anaconda.org/conda-forge/win-64/fonttools-4.54.1-py312h4389bb4_0.conda + sha256: 
e2307a41c022617e0533b853d999d40c25cab78aed41cc68c5732139755aa23d + md5: f6e3e9518917d864629781bcbe3f8cba depends: - brotli - munkres @@ -6021,17 +6074,16 @@ packages: license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2406661 - timestamp: 1725391814010 + size: 2448344 + timestamp: 1727206919876 - kind: conda name: fonttools - version: 4.53.1 - build: py312h66e93f0_1 - build_number: 1 + version: 4.54.1 + build: py312h66e93f0_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h66e93f0_1.conda - sha256: 19e4bc017b219e02de712e948d48a23c8bb98dabe741c807949c7fb48abe71d8 - md5: 7abb7d39d482ac3b8e27e6c0fff3b168 + url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h66e93f0_0.conda + sha256: 3b5257607728c21e093255a7f5595bdcfce143638f96b704f3913bf64bdde8a6 + md5: e311030d9322f6f77e71e013490c83b2 depends: - __glibc >=2.17,<3.0.a0 - brotli @@ -6043,17 +6095,16 @@ packages: license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2797458 - timestamp: 1725391437161 + size: 2845915 + timestamp: 1727206550625 - kind: conda name: fonttools - version: 4.53.1 - build: py312hb553811_1 - build_number: 1 + version: 4.54.1 + build: py312hb553811_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.53.1-py312hb553811_1.conda - sha256: 09f1d89bb6ecff8704162a5bd8c1309a978ad5e3a3a4dfe009ea22fb75d070cb - md5: df00a7504c74682d63ae89c32687a3a2 + url: https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.54.1-py312hb553811_0.conda + sha256: 0fead35d7799f6363ea9404cdbe3f4304e0d696cdb399329422d05d4c7f77442 + md5: f664d25c5c512eb315c0f31729325255 depends: - __osx >=10.13 - brotli @@ -6064,8 +6115,8 @@ packages: license_family: MIT purls: - pkg:pypi/fonttools?source=hash-mapping - size: 2724931 - timestamp: 1725391478296 + size: 2743146 + timestamp: 1727206498541 - kind: conda name: fqdn version: 1.5.1 @@ -6211,12 +6262,12 @@ packages: - kind: conda name: 
gdal version: 3.9.2 - build: py312h1299960_2 - build_number: 2 + build: py312h1299960_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_2.conda - sha256: edd3249b5c4d5fc84ac5aafe7a41f1a4e5d5ff2d474f37a7ce2484d50553b6a8 - md5: ffe1e203655d54e3d2e42de350c5785f + url: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_7.conda + sha256: 656eea557c38f713cb6d94efa798ecae9b70b807976d2e99fd7080462575098d + md5: 9cf27e3f9d97ea13f250db9253a25dc8 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -6228,19 +6279,20 @@ packages: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT + license_family: MIT purls: - pkg:pypi/gdal?source=hash-mapping - size: 1697617 - timestamp: 1726092649530 + size: 1698682 + timestamp: 1728293498040 - kind: conda name: gdal version: 3.9.2 - build: py312h16ac12d_2 - build_number: 2 + build: py312h16ac12d_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_2.conda - sha256: b912c4856443b46ceeaf70aaecd831357331560abb87321f481943670dac4476 - md5: 5fe9072721d3fe55f3d8b016019ccadd + url: https://conda.anaconda.org/conda-forge/win-64/gdal-3.9.2-py312h16ac12d_7.conda + sha256: 4247071c2216b9813f6732edc9c4728bb11f6877a1c87000b6695eebef741641 + md5: 4a093f053f2827cfa102304bf945e845 depends: - libgdal-core 3.9.2.* - libkml >=1.3.0,<1.4.0a0 @@ -6252,19 +6304,20 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: - pkg:pypi/gdal?source=hash-mapping - size: 1637570 - timestamp: 1726094182525 + size: 1634719 + timestamp: 1728294392653 - kind: conda name: gdal version: 3.9.2 - build: py312h365dfcf_2 - build_number: 2 + build: py312h365dfcf_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_2.conda - sha256: 502bb52738130128f5dfe692dd8b206160c7a8c286c21f14a6ec689529a024d3 - md5: 
640b75d6bebe65db4d09f999f7230f38 + url: https://conda.anaconda.org/conda-forge/osx-64/gdal-3.9.2-py312h365dfcf_7.conda + sha256: 0df4b2136311b288f52a9532accc50e45898eb512bae70a1f3db432d1204763b + md5: d9cdf305ba3e2dc9989bfa116001104b depends: - __osx >=10.13 - libcxx >=17 @@ -6275,23 +6328,24 @@ packages: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT + license_family: MIT purls: - pkg:pypi/gdal?source=hash-mapping - size: 1679539 - timestamp: 1726092790963 + size: 1680288 + timestamp: 1728293401273 - kind: conda name: geocube - version: 0.5.2 + version: 0.7.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/geocube-0.5.2-pyhd8ed1ab_0.conda - sha256: e926339ffb33ba2de50c0e29f5871d9eb196f65a3c9807d4aef64b9f067fe57d - md5: 2b09edce0b7c79d2da85ae52bd68ca95 + url: https://conda.anaconda.org/conda-forge/noarch/geocube-0.7.0-pyhd8ed1ab_0.conda + sha256: 88ac10e11363ecf99561ef4ff9260983b348a8845431aec125f14df0d174e96c + md5: 8887c687ee10662fbf1b22fc5b7e7dba depends: - appdirs - click >=6.0 - - geopandas >=0.7 + - geopandas >=1 - numpy >=1.20 - odc-geo - pyproj >=2 @@ -6304,8 +6358,8 @@ packages: license_family: BSD purls: - pkg:pypi/geocube?source=hash-mapping - size: 23157 - timestamp: 1713410458393 + size: 24282 + timestamp: 1728413681823 - kind: conda name: geographiclib version: '2.0' @@ -6326,15 +6380,16 @@ packages: - kind: conda name: geopandas version: 1.0.1 - build: pyhd8ed1ab_0 + build: pyhd8ed1ab_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_0.conda - sha256: 6d3f8148d88b1f2c100e03fce441f19577f6a4b69e3a2c57d522b48010d84f5f - md5: efef4ce75a678216d510d08222845c7f + url: https://conda.anaconda.org/conda-forge/noarch/geopandas-1.0.1-pyhd8ed1ab_1.conda + sha256: ea0e200967b93a1342670bee137917e93d04742f3c3c626fe435ebb29462bbd7 + md5: 79a9a8d2fd39ecb4081c0df0c10135dc depends: - folium - - geopandas-base 1.0.1 
pyha770c72_0 + - geopandas-base 1.0.1 pyha770c72_1 - mapclassify >=2.4.0 - matplotlib-base - pyogrio >=0.7.2 @@ -6344,17 +6399,18 @@ packages: license: BSD-3-Clause license_family: BSD purls: [] - size: 7438 - timestamp: 1719933423912 + size: 7545 + timestamp: 1726898026216 - kind: conda name: geopandas-base version: 1.0.1 - build: pyha770c72_0 + build: pyha770c72_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_0.conda - sha256: b07d76f79cc3b1dc7f5a73aeeb0f7c9977526d73237df7e200582fdff48045d1 - md5: 1b7d46173c29e14dde41f97cf5aa61df + url: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-1.0.1-pyha770c72_1.conda + sha256: 1b0853491a299e95d57ccf3f3c9053a1b7e49fc9b2ad959f321b0717e567e249 + md5: cad8d8e1583463e7642adc72a76dc3c5 depends: - numpy >=1.22 - packaging @@ -6365,8 +6421,8 @@ packages: license_family: BSD purls: - pkg:pypi/geopandas?source=hash-mapping - size: 239347 - timestamp: 1719933418796 + size: 239539 + timestamp: 1726898022361 - kind: conda name: geopy version: 2.4.1 @@ -6388,155 +6444,154 @@ packages: timestamp: 1709140298005 - kind: conda name: geos - version: 3.12.2 - build: h5a68840_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/geos-3.12.2-h5a68840_1.conda - sha256: e8606bbf3ebbaf2817d65d4b48180cc1d828a030061e0a5ef55281f9cc7f1e28 - md5: 019e3460f99eb7c2198c532c50d08791 + version: 3.13.0 + build: h5888daf_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda + sha256: 5c70d6d16e044859edca85feb9d4f1c3c6062aaf88d650826f5ccdf8c44336de + md5: 40b4ab956c90390e407bb177f8a58bab depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 license: LGPL-2.1-only purls: [] - size: 1561663 - timestamp: 1721747131206 + size: 1869233 + timestamp: 1725676083126 - kind: conda name: geos - 
version: 3.12.2 - build: he02047a_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.2-he02047a_1.conda - sha256: bc3860e6689be6968ca5bae3660f43dd3e22f4dd61c0bfc99ffd0d0daf4f7a73 - md5: aab9195bc018b82dc77a84584b36cce9 + version: 3.13.0 + build: h5a68840_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/geos-3.13.0-h5a68840_0.conda + sha256: 2b46d6f304f70dfca304169299908b558bd1e83992acb5077766eefa3d3fe35f + md5: 08a30fe29a645fc5c768c0968db116d3 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: LGPL-2.1-only purls: [] - size: 1737633 - timestamp: 1721746525671 + size: 1665961 + timestamp: 1725676536384 - kind: conda name: geos - version: 3.12.2 - build: hf036a51_1 - build_number: 1 + version: 3.13.0 + build: hac325c4_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/geos-3.12.2-hf036a51_1.conda - sha256: 1d5ec9da8a543885228aa7ca9fabfcacd653b0f14e8d175bb83de60afcffc166 - md5: fbb2688b537dafd5fb554d0b7ef27397 + url: https://conda.anaconda.org/conda-forge/osx-64/geos-3.13.0-hac325c4_0.conda + sha256: 7e3201780fda37f23623e384557eb66047942db1c2fe0a7453c0caf301ec8bbb + md5: 905fbe84dd83254e4e0db610123dd32d depends: - __osx >=10.13 - - libcxx >=16 + - libcxx >=17 license: LGPL-2.1-only purls: [] - size: 1482492 - timestamp: 1721747118528 + size: 1577166 + timestamp: 1725676182968 - kind: conda name: geotiff version: 1.7.3 - build: h232476a_2 - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h232476a_2.conda - sha256: cf512663c8681e5e5a3d30046860ad06a8a4700b217d34c348f974ea481a0b18 - md5: 8968032e8f14d84b40a20437707f8ec7 + build: h2b6e260_3 + build_number: 3 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/geotiff-1.7.3-h2b6e260_3.conda + sha256: 
7e58d94340a499c3c62022ba070231f1dcc7c55a98f8f2a7e982d2071dfd421c + md5: bbc58a544b03990b3bc8c2139cc6c34f depends: + - __osx >=10.13 + - libcxx >=17 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.3.1,<2.0a0 - - proj >=9.4.1,<9.5.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - proj >=9.5.0,<9.6.0a0 - zlib license: MIT license_family: MIT purls: [] - size: 123406 - timestamp: 1722335928788 + size: 115513 + timestamp: 1726603109733 - kind: conda name: geotiff version: 1.7.3 - build: h4bbec01_2 - build_number: 2 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/geotiff-1.7.3-h4bbec01_2.conda - sha256: a56e2154bfd21588ffde48ae14f906ea6b7e0eb49f71b2e3fb320cd066c22503 - md5: d83428f874b4fc2d204613ad7ad42b6d + build: h496ac4d_3 + build_number: 3 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h496ac4d_3.conda + sha256: 116120a2f4411618800c2a5ce246dfc313298e545ce1ffaa85f28cc3ac2236ac + md5: fb20f424102030f3952532cc7aebdbd8 depends: - - __osx >=10.13 - - libcxx >=16 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.3.1,<2.0a0 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 - zlib license: MIT license_family: MIT purls: [] - size: 115552 - timestamp: 1722335565552 + size: 123087 + timestamp: 1726603487099 - kind: conda name: geotiff version: 1.7.3 - build: hf7fa9e8_2 - build_number: 2 + build: h77b800c_3 + build_number: 3 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-hf7fa9e8_2.conda - sha256: 3ecd04a14cb3d64f0641828aa9e918895b508809aedf7b5b0ec712c6957b5815 - md5: 1d6bdc6b2c62c8cc90c67b50142d7b7f + url: https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-h77b800c_3.conda + sha256: 94c7d002c70a4802a78ac2925ad6b36327cff85e0af6af2825b11a968c81ec20 + md5: 
4eb52aecb43e7c72f8e4fca0c386354e depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=13 - libjpeg-turbo >=3.0.0,<4.0a0 - - libstdcxx-ng >=12 - - libtiff >=4.6.0,<4.7.0a0 + - libstdcxx >=13 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.3.1,<2.0a0 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 - zlib license: MIT license_family: MIT purls: [] - size: 131714 - timestamp: 1722335412421 + size: 131394 + timestamp: 1726602918349 - kind: conda name: gflags version: 2.2.2 - build: hb1e8313_1004 - build_number: 1004 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hb1e8313_1004.tar.bz2 - sha256: 39540f879057ae529cad131644af111a8c3c48b384ec6212de6a5381e0863948 - md5: 3f59cc77a929537e42120faf104e0d16 + build: h5888daf_1005 + build_number: 1005 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda + sha256: 6c33bf0c4d8f418546ba9c250db4e4221040936aef8956353bc764d4877bc39a + md5: d411fc29e338efb48c5fd4576d71d881 depends: - - libcxx >=10.0.1 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 license: BSD-3-Clause license_family: BSD purls: [] - size: 94612 - timestamp: 1599590973213 + size: 119654 + timestamp: 1726600001928 - kind: conda name: gflags version: 2.2.2 - build: he1b5a44_1004 - build_number: 1004 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2 - sha256: a853c0cacf53cfc59e1bca8d6e5cdfe9f38fce836f08c2a69e35429c2a492e77 - md5: cddaf2c63ea4a5901cf09524c490ecdc + build: hac325c4_1005 + build_number: 1005 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hac325c4_1005.conda + sha256: c0bea66f71a6f4baa8d4f0248e17f65033d558d9e882c0af571b38bcca3e4b46 + md5: a26de8814083a6971f14f9c8c3cb36c2 depends: - - libgcc-ng >=7.5.0 - - libstdcxx-ng >=7.5.0 + - __osx >=10.13 + - libcxx >=17 license: BSD-3-Clause license_family: BSD purls: [] - size: 116549 - timestamp: 1594303828933 + 
size: 84946 + timestamp: 1726600054963 - kind: conda name: giflib version: 5.2.2 @@ -6636,13 +6691,13 @@ packages: timestamp: 1711634622644 - kind: conda name: griffe - version: 1.3.0 + version: 1.3.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.0-pyhd8ed1ab_0.conda - sha256: 345891673e53b7cef21e2aea481475015bf3f93d5128a9531f87aa4b6fbd05f5 - md5: 9f55045a47cb6fffdc493c10a2b7f463 + url: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda + sha256: 7e9420f5bd3e1c538dd2e4a6ba76e197c8e173593e575d1fb4ce5761fafdeea8 + md5: db8382698d1f5307b33fbc17bafc1e4e depends: - astunparse >=1.6 - colorama >=0.4 @@ -6650,8 +6705,8 @@ packages: license: ISC purls: - pkg:pypi/griffe?source=hash-mapping - size: 97711 - timestamp: 1726136969053 + size: 97688 + timestamp: 1728324149441 - kind: conda name: h11 version: 0.14.0 @@ -6880,13 +6935,13 @@ packages: timestamp: 1598856368685 - kind: conda name: httpcore - version: 1.0.5 + version: 1.0.6 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.5-pyhd8ed1ab_0.conda - sha256: 4025644200eefa0598e4600a66fd4804a57d9fd7054a5c8c45e508fd875e0b84 - md5: a6b9a0158301e697e4d0a36a3d60e133 + url: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda + sha256: 8952c3f1eb18bf4d7e813176c3b23e0af4e863e8b05087e73f74f371d73077ca + md5: b8e1901ef9a215fc41ecfb6bef7e0943 depends: - anyio >=3.0,<5.0 - certifi @@ -6898,8 +6953,8 @@ packages: license_family: BSD purls: - pkg:pypi/httpcore?source=hash-mapping - size: 45816 - timestamp: 1711597091407 + size: 45711 + timestamp: 1727821031365 - kind: conda name: httpx version: 0.27.2 @@ -7000,13 +7055,13 @@ packages: timestamp: 1720853966338 - kind: conda name: identify - version: 2.6.0 + version: 2.6.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: 
https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda - sha256: 4a2889027df94d51be283536ac235feba77eaa42a0d051f65cd07ba824b324a6 - md5: f80cc5989f445f23b1622d6c455896d9 + url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + sha256: dc752392f327e64e32bc3122758b2d8951aec9d6e6aa888463c73d18a10e3c56 + md5: 43f629202f9eec21be5f71171fb5daf8 depends: - python >=3.6 - ukkonen @@ -7014,25 +7069,25 @@ packages: license_family: MIT purls: - pkg:pypi/identify?source=hash-mapping - size: 78197 - timestamp: 1720413864262 + size: 78078 + timestamp: 1726369674008 - kind: conda name: idna - version: '3.8' + version: '3.10' build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda - sha256: 8660d38b272d3713ec8ac5ae918bc3bc80e1b81e1a7d61df554bded71ada6110 - md5: 99e164522f6bdf23c177c8d9ae63f975 + url: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda + sha256: 8c57fd68e6be5eecba4462e983aed7e85761a519aab80e834bbd7794d4b545b2 + md5: 7ba2ede0e7c795ff95088daf0dc59753 depends: - python >=3.6 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/idna?source=hash-mapping - size: 49275 - timestamp: 1724450633325 + size: 49837 + timestamp: 1726459583613 - kind: conda name: importlib-metadata version: 8.5.0 @@ -7064,6 +7119,7 @@ packages: - importlib_resources >=6.4.5,<6.4.6.0a0 - python >=3.8 license: Apache-2.0 + license_family: APACHE purls: [] size: 9595 timestamp: 1725921472017 @@ -7098,8 +7154,9 @@ packages: constrains: - importlib-resources >=6.4.5,<6.4.6.0a0 license: Apache-2.0 + license_family: APACHE purls: - - pkg:pypi/importlib-resources?source=compressed-mapping + - pkg:pypi/importlib-resources?source=hash-mapping size: 32725 timestamp: 1725921462405 - kind: conda @@ -7226,13 +7283,13 @@ packages: timestamp: 1719845667420 - kind: conda name: ipython - version: 8.27.0 + version: 8.28.0 build: pyh707e725_0 subdir: noarch 
noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda - sha256: 4eaa22b1afdbd0076ab1cc8da99d9c62f5c5f14cd0a30ff99c133e22f2db5a58 - md5: 0ed09f0c0f62f50b4b7dd2744af13629 + url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh707e725_0.conda + sha256: b18adc659d43fc8eef026312a74cd39944ffe9d8decee71ec60a1974fb8ec86c + md5: 7142a7dff2a47e40b55d304decadd78a depends: - __unix - decorator @@ -7250,18 +7307,18 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/ipython?source=hash-mapping - size: 598878 - timestamp: 1725050237172 + - pkg:pypi/ipython?source=compressed-mapping + size: 600094 + timestamp: 1727944801855 - kind: conda name: ipython - version: 8.27.0 + version: 8.28.0 build: pyh7428d3b_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh7428d3b_0.conda - sha256: 2826fae9530bf5ea53b3b825483d9bd1c01b5635aebc37e0f56003bab434ade6 - md5: d7f3d6377b3988475bd1fa6493b7b115 + url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.28.0-pyh7428d3b_0.conda + sha256: 8d2480d5593854e6bd994329a0b1819d39b35c5ee9e85043737df962f236a948 + md5: 4df2592ebe3672f282a02c557db209ee depends: - __win - colorama @@ -7280,8 +7337,8 @@ packages: license_family: BSD purls: - pkg:pypi/ipython?source=hash-mapping - size: 600176 - timestamp: 1725050732048 + size: 599622 + timestamp: 1727945272442 - kind: conda name: isoduration version: 20.11.0 @@ -7356,37 +7413,35 @@ packages: timestamp: 1714665585214 - kind: conda name: json-c - version: '0.17' - build: h1220068_1 - build_number: 1 + version: '0.18' + build: h6688a6e_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda - sha256: 0caf06ccfbd6f9a7b3a1e09fa83e318c9e84f2d1c1003a9e486f2600f4096720 - md5: f8f0f0c4338bad5c34a4e9e11460481d + url: https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda + sha256: 
09e706cb388d3ea977fabcee8e28384bdaad8ce1fc49340df5f868a2bd95a7da + md5: 38f5dbc9ac808e31c00650f7be1db93f depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 83682 - timestamp: 1720812978049 + size: 82709 + timestamp: 1726487116178 - kind: conda name: json-c - version: '0.17' - build: h6253ea5_1 - build_number: 1 + version: '0.18' + build: hc62ec3d_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.17-h6253ea5_1.conda - sha256: 66ddd1a4d643c7c800a1bb8e61f5f4198ec102be37db9a6d2e037004442eff8d - md5: fb72a2ef514c2df4ba035187945a6dcf + url: https://conda.anaconda.org/conda-forge/osx-64/json-c-0.18-hc62ec3d_0.conda + sha256: b58f8002318d6b880a98e1b0aa943789b3b0f49334a3bdb9c19b463a0b799cad + md5: 2c5a3c42de607dda0cfa0edd541fd279 depends: - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 72163 - timestamp: 1720813111542 + size: 71514 + timestamp: 1726487153769 - kind: conda name: json5 version: 0.9.25 @@ -7483,23 +7538,22 @@ packages: timestamp: 1720529611305 - kind: conda name: jsonschema-specifications - version: 2023.12.1 + version: 2024.10.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda - sha256: a9630556ddc3121c0be32f4cbf792dd9102bd380d5cd81d57759d172cf0c2da2 - md5: a0e4efb5f35786a05af4809a2fb1f855 + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda + sha256: 82f8bed0f21dc0b3aff40dd4e39d77e85b93b0417bc5659b001e0109341b8b98 + md5: 720745920222587ef942acfbc578b584 depends: - - importlib_resources >=1.4.0 - python >=3.8 - referencing >=0.31.0 license: MIT license_family: MIT purls: - pkg:pypi/jsonschema-specifications?source=hash-mapping - size: 16431 - timestamp: 1703778502971 + size: 16165 + timestamp: 1728418976382 - kind: conda name: jsonschema-with-format-nongpl version: 4.23.0 @@ -7545,15 
+7599,15 @@ packages: timestamp: 1712707521811 - kind: conda name: jupyter_client - version: 8.6.2 + version: 8.6.3 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda - sha256: 634f065cdd1d0aacd4bb6848ebf240dcebc8578135d65f4ad4aa42b2276c4e0c - md5: 3cdbb2fa84490e5fd44c9f9806c0d292 + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + sha256: 4419c85e209a715f551a5c9bead746f29ee9d0fc41e772a76db3868622795671 + md5: a14218cfb29662b4a19ceb04e93e298e depends: - - importlib_metadata >=4.8.3 + - importlib-metadata >=4.8.3 - jupyter_core >=4.12,!=5.0.* - python >=3.8 - python-dateutil >=2.8.2 @@ -7564,66 +7618,52 @@ packages: license_family: BSD purls: - pkg:pypi/jupyter-client?source=hash-mapping - size: 106248 - timestamp: 1716472312833 -- kind: conda - name: jupyter_core - version: 5.7.2 - build: py312h2e8e312_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/jupyter_core-5.7.2-py312h2e8e312_0.conda - sha256: bf2a315febec297e05fa77e39bd371d53553bd1c347e495ac34198fec18afb11 - md5: 3ed5c1981d05f125696f392407d36ce2 - depends: - - platformdirs >=2.5 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 - - pywin32 >=300 - - traitlets >=5.3 - license: BSD-3-Clause - license_family: BSD - purls: - - pkg:pypi/jupyter-core?source=hash-mapping - size: 109880 - timestamp: 1710257719549 + size: 106055 + timestamp: 1726610805505 - kind: conda name: jupyter_core version: 5.7.2 - build: py312h7900ff3_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.7.2-py312h7900ff3_0.conda - sha256: 22a6259c2b139191c76ed7633d1865757b3c15007989f6c74304a80f28e5a262 - md5: eee5a2e3465220ed87196bbb5665f420 + build: pyh31011fe_1 + build_number: 1 + subdir: noarch + noarch: python + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda + sha256: 
732b1e8536bc22a5a174baa79842d79db2f4956d90293dd82dc1b3f6099bcccd + md5: 0a2980dada0dd7fd0998f0342308b1b1 depends: + - __unix - platformdirs >=2.5 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 + - python >=3.8 - traitlets >=5.3 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/jupyter-core?source=hash-mapping - size: 92843 - timestamp: 1710257533875 + size: 57671 + timestamp: 1727163547058 - kind: conda name: jupyter_core version: 5.7.2 - build: py312hb401068_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/jupyter_core-5.7.2-py312hb401068_0.conda - sha256: 3e57d1eaf22c793711367335f9f8b647c011b64a95bfc796b50967a4b2ae27c2 - md5: a205e28ce7ab71773dcaaf94f6418612 + build: pyh5737063_1 + build_number: 1 + subdir: noarch + noarch: python + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh5737063_1.conda + sha256: 7c903b2d62414c3e8da1f78db21f45b98de387aae195f8ca959794113ba4b3fd + md5: 46d87d1c0ea5da0aae36f77fa406e20d depends: + - __win + - cpython - platformdirs >=2.5 - - python >=3.12,<3.13.0a0 - - python_abi 3.12.* *_cp312 + - python >=3.8 + - pywin32 >=300 - traitlets >=5.3 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/jupyter-core?source=hash-mapping - size: 92679 - timestamp: 1710257658978 + size: 58269 + timestamp: 1727164026641 - kind: conda name: jupyter_events version: 0.10.0 @@ -7978,7 +8018,7 @@ packages: md5: d3592435917b62a8becff3a60db674f6 depends: - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 @@ -7997,7 +8037,7 @@ packages: md5: 1442db8f03517834843666c422238c9b depends: - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 license: MIT license_family: MIT purls: [] @@ -8014,7 +8054,7 @@ packages: depends: - libgcc-ng >=12 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 license: MIT 
license_family: MIT purls: [] @@ -8022,20 +8062,22 @@ packages: timestamp: 1701647787198 - kind: conda name: ld_impl_linux-64 - version: '2.40' - build: hf3520f5_7 - build_number: 7 + version: '2.43' + build: h712a8e2_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - sha256: 764b6950aceaaad0c67ef925417594dd14cd2e22fff864aeef455ac259263d15 - md5: b80f2f396ca2c28b8c14c437a4ed1e74 + url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_1.conda + sha256: 0c21387f9a411e3d1f7f2969026bacfece133c8f1e72faea9cde29c0c19e1f3a + md5: 83e1364586ceb8d0739fbc85b5c95837 + depends: + - __glibc >=2.17,<3.0.a0 constrains: - - binutils_impl_linux-64 2.40 + - binutils_impl_linux-64 2.43 license: GPL-3.0-only license_family: GPL purls: [] - size: 707602 - timestamp: 1718625640445 + size: 669616 + timestamp: 1727304687962 - kind: conda name: lerc version: 4.0.0 @@ -8085,66 +8127,66 @@ packages: timestamp: 1657977526749 - kind: conda name: libabseil - version: '20240116.2' - build: cxx17_he02047a_1 + version: '20240722.0' + build: cxx17_h5888daf_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda - sha256: 945396726cadae174a661ce006e3f74d71dbd719219faf7cc74696b267f7b0b5 - md5: c48fc56ec03229f294176923c3265c05 + url: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda + sha256: 8f91429091183c26950f1e7ffa730e8632f0627ba35d2fccd71df31628c9b4e5 + md5: e1f604644fe8d78e22660e2fec6756bc depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc >=13 + - libstdcxx >=13 constrains: - - abseil-cpp =20240116.2 - - libabseil-static =20240116.2=cxx17* + - libabseil-static =20240722.0=cxx17* + - abseil-cpp =20240722.0 license: Apache-2.0 license_family: Apache purls: [] - size: 1264712 - timestamp: 1720857377573 + size: 1310521 + timestamp: 
1727295454064 - kind: conda name: libabseil - version: '20240116.2' - build: cxx17_he0c23c2_1 + version: '20240722.0' + build: cxx17_hac325c4_1 build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240116.2-cxx17_he0c23c2_1.conda - sha256: aafa7993698420ef786c145f660e6822139c02cf9230fbad43efff6d4828defc - md5: 19725e54b7f996e0a5748ec5e9e37ae9 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240722.0-cxx17_hac325c4_1.conda + sha256: b548e80280242ad1d93d8d7fb48a30af7e4124959ba2031c65c9675b98163652 + md5: 40373920232a6ac0404eee9cf39a9f09 depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __osx >=10.13 + - libcxx >=17 constrains: - - libabseil-static =20240116.2=cxx17* - - abseil-cpp =20240116.2 + - abseil-cpp =20240722.0 + - libabseil-static =20240722.0=cxx17* license: Apache-2.0 license_family: Apache purls: [] - size: 1802886 - timestamp: 1720857653184 + size: 1170354 + timestamp: 1727295597292 - kind: conda name: libabseil - version: '20240116.2' - build: cxx17_hf036a51_1 + version: '20240722.0' + build: cxx17_he0c23c2_1 build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240116.2-cxx17_hf036a51_1.conda - sha256: 396d18f39d5207ecae06fddcbc6e5f20865718939bc4e0ea9729e13952833aac - md5: d6c78ca84abed3fea5f308ac83b8f54e + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240722.0-cxx17_he0c23c2_1.conda + sha256: 52ff148dee1871ef1d5c298bae20309707e866b44714a0a333a5ed2cf9a38832 + md5: 3f59a73b07a05530b252ecb07dd882b9 depends: - - __osx >=10.13 - - libcxx >=16 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - abseil-cpp =20240116.2 - - libabseil-static =20240116.2=cxx17* + - libabseil-static =20240722.0=cxx17* + - abseil-cpp =20240722.0 license: Apache-2.0 license_family: Apache purls: [] - size: 1124364 - timestamp: 1720857589333 + size: 1777570 + 
timestamp: 1727296115119 - kind: conda name: libaec version: 1.1.3 @@ -8268,24 +8310,24 @@ packages: - kind: conda name: libarrow version: 17.0.0 - build: h29daf90_13_cpu - build_number: 13 + build: h297d146_20_cpu + build_number: 20 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h29daf90_13_cpu.conda - sha256: 1a0f66e822f4cde398b15fe7ac94cb4197635798da9feebcb88c900637e05f77 - md5: d0ea8c4474c45aae86eff71a0f293013 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h297d146_20_cpu.conda + sha256: 68e302653d238de390bb7ead3e96f3bee18a9dcde7c827e5f9827c48bf7cadd1 + md5: c0b6bc35ca65b358e76d4457eb480100 depends: - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - bzip2 >=1.0.8,<2.0a0 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 + - libabseil >=20240722.0,<20240723.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl >=8.9.1,<9.0a0 - - libgoogle-cloud >=2.28.0,<2.29.0a0 - - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 + - libcurl >=8.10.1,<9.0a0 + - libgoogle-cloud >=2.29.0,<2.30.0a0 + - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - libre2-11 >=2023.9.1,<2024.0a0 - libutf8proc >=2.8.0,<3.0a0 - libzlib >=1.3.1,<2.0a0 @@ -8294,45 +8336,44 @@ packages: - re2 - snappy >=1.2.1,<1.3.0a0 - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 constrains: - - arrow-cpp <0.0a0 - parquet-cpp <0.0a0 - apache-arrow-proc =*=cpu + - arrow-cpp <0.0a0 license: Apache-2.0 - license_family: APACHE purls: [] - size: 5128979 - timestamp: 1725215183038 + size: 5064357 + timestamp: 1728535632310 - kind: conda name: libarrow version: 17.0.0 - build: h8d2e343_13_cpu - build_number: 13 + build: h364f349_20_cpu + build_number: 20 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda - sha256: 91e639761f29ee1ca144e92110d47c8e68038f26201eef25585a48826e037fb2 - md5: dc379f362829d5df5ce6722565110029 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h364f349_20_cpu.conda + sha256: 58e01b7214111281f897a859c09bc02b14f6bbc9fb97c93aa873ef492a8e55cb + md5: 5d3c3264432d2dd25f8aed1e97102b94 depends: - __glibc >=2.17,<3.0.a0 - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.8.0,<1.8.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-files-datalake-cpp >=12.11.0,<12.11.1.0a0 + - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-files-datalake-cpp >=12.12.0,<12.12.1.0a0 - bzip2 >=1.0.8,<2.0a0 - gflags >=2.2.2,<2.3.0a0 - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 + - libabseil >=20240722.0,<20240723.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - libgcc >=13 - - libgoogle-cloud >=2.28.0,<2.29.0a0 - - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 + - libgoogle-cloud >=2.29.0,<2.30.0a0 + - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - libre2-11 >=2023.9.1,<2024.0a0 - libstdcxx >=13 - libutf8proc >=2.8.0,<3.0a0 @@ -8343,40 +8384,39 @@ packages: - snappy >=1.2.1,<1.3.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - - arrow-cpp <0.0a0 - - parquet-cpp <0.0a0 - apache-arrow-proc =*=cpu + - parquet-cpp <0.0a0 + - arrow-cpp <0.0a0 license: Apache-2.0 - license_family: APACHE purls: [] - size: 8512685 - timestamp: 1725214716301 + size: 8528014 + timestamp: 1728534747835 - kind: conda name: libarrow version: 17.0.0 - build: ha60c65e_13_cpu - build_number: 13 + build: h74c0fbd_20_cpu + build_number: 20 subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-ha60c65e_13_cpu.conda - sha256: d8096066ce779a82cbb2045030f8095ed5689cac2ac1ee0c58251e7f448f1a87 - md5: 4cdf43459510697d824c377428a120b1 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h74c0fbd_20_cpu.conda + sha256: 216533708248423936ac11dc5b289088986ed02372f238a3cfc0f1e891174891 + md5: b10c3ff291e32a7be99948500241dd22 depends: - __osx >=10.13 - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.8.0,<1.8.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-files-datalake-cpp >=12.11.0,<12.11.1.0a0 + - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-files-datalake-cpp >=12.12.0,<12.12.1.0a0 - bzip2 >=1.0.8,<2.0a0 - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 + - libabseil >=20240722.0,<20240723.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - - libcxx >=17 - - libgoogle-cloud >=2.28.0,<2.29.0a0 - - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 + - libcxx >=18 + - libgoogle-cloud >=2.29.0,<2.30.0a0 + - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - libre2-11 >=2023.9.1,<2024.0a0 - libutf8proc >=2.8.0,<3.0a0 - libzlib >=1.3.1,<2.0a0 @@ -8390,199 +8430,189 @@ packages: - parquet-cpp <0.0a0 - apache-arrow-proc =*=cpu license: Apache-2.0 - license_family: APACHE purls: [] - size: 5899274 - timestamp: 1725214352592 + size: 5943895 + timestamp: 1728533748437 - kind: conda name: libarrow-acero version: 17.0.0 - build: h5888daf_13_cpu - build_number: 13 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda - sha256: cda9e38ad7af7ba72416031b089de5048f8526ae586149ff9f6506366689d699 - md5: b654d072b8d5da807495e49b28a0b884 + build: 
h240833e_20_cpu + build_number: 20 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_20_cpu.conda + sha256: 8334e78ef6d83390a240390bbdf1295b3c5d54cdbc1e3079f364e40e8947f58c + md5: 1a60d732f131749c172cca38a7321fb2 depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h8d2e343_13_cpu - - libgcc >=13 - - libstdcxx >=13 + - __osx >=10.13 + - libarrow 17.0.0 h74c0fbd_20_cpu + - libcxx >=18 license: Apache-2.0 - license_family: APACHE purls: [] - size: 609649 - timestamp: 1725214754397 + size: 519923 + timestamp: 1728533863684 - kind: conda name: libarrow-acero version: 17.0.0 - build: hac325c4_13_cpu - build_number: 13 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-hac325c4_13_cpu.conda - sha256: c6195a789edb257746ca9f8648419c9efdb67e0ef62d2ba818eaa921f94e90af - md5: 218079f1d0ba0a46246db86a9e96c417 + build: h5888daf_20_cpu + build_number: 20 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_20_cpu.conda + sha256: fd79ee1d7ba766ad4661f7df43a5c2f08c01bb04d41498fc2982b71e4dac7d30 + md5: 011c07ec81754028bceebaa0ac839e12 depends: - - __osx >=10.13 - - libarrow 17.0.0 ha60c65e_13_cpu - - libcxx >=17 + - __glibc >=2.17,<3.0.a0 + - libarrow 17.0.0 h364f349_20_cpu + - libgcc >=13 + - libstdcxx >=13 license: Apache-2.0 - license_family: APACHE purls: [] - size: 515115 - timestamp: 1725214443841 + size: 608612 + timestamp: 1728534791375 - kind: conda name: libarrow-acero version: 17.0.0 - build: he0c23c2_13_cpu - build_number: 13 + build: hac47afa_20_cpu + build_number: 20 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-he0c23c2_13_cpu.conda - sha256: 850b28abba3e40302cb5425ffb96f085d2089decafb2e80d85b4f8b44c2c777d - md5: 1a38e993ef119557596ae20cd68a1207 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_20_cpu.conda + sha256: 
9c167a0a48444a5ad763f9f0d6232882278d441ce5ac9c2aedc11af6fb10afa3 + md5: 58aaa12782e5fce95d327bed789cd251 depends: - - libarrow 17.0.0 h29daf90_13_cpu + - libarrow 17.0.0 h297d146_20_cpu - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 license: Apache-2.0 - license_family: APACHE purls: [] - size: 445286 - timestamp: 1725215254997 + size: 444400 + timestamp: 1728535698248 - kind: conda name: libarrow-dataset version: 17.0.0 - build: h5888daf_13_cpu - build_number: 13 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda - sha256: b3fac9bc9a399670d6993738d018324d6e1b0a85755b484204405bb72efabc4e - md5: cd2c36e8865b158b82f61c6aac28b7e1 + build: h240833e_20_cpu + build_number: 20 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_20_cpu.conda + sha256: 372a191062dbac9684b2fd7141a974d388768292f1afc007b78d060e48c613a2 + md5: 33329715c8411290070ee0a25ec512ff depends: - - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h8d2e343_13_cpu - - libarrow-acero 17.0.0 h5888daf_13_cpu - - libgcc >=13 - - libparquet 17.0.0 h39682fd_13_cpu - - libstdcxx >=13 + - __osx >=10.13 + - libarrow 17.0.0 h74c0fbd_20_cpu + - libarrow-acero 17.0.0 h240833e_20_cpu + - libcxx >=18 + - libparquet 17.0.0 hc957f30_20_cpu license: Apache-2.0 - license_family: APACHE purls: [] - size: 582848 - timestamp: 1725214820464 + size: 511557 + timestamp: 1728535062865 - kind: conda name: libarrow-dataset version: 17.0.0 - build: hac325c4_13_cpu - build_number: 13 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-hac325c4_13_cpu.conda - sha256: de66e86133af737ecafd67a043f2756afb78fe77503bcf8e1dc2b73a706f55b5 - md5: d7609f5867208b278655602ac636363b + build: h5888daf_20_cpu + build_number: 20 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_20_cpu.conda + sha256: 3c940c82d8410e6a7aac17c8ba48f415a6b16aad43c9de9ff3a64ac69cbbcaa3 + md5: 0f71b9865661fd45b5d07ab6db734c33 depends: - - __osx >=10.13 - - libarrow 17.0.0 ha60c65e_13_cpu - - libarrow-acero 17.0.0 hac325c4_13_cpu - - libcxx >=17 - - libparquet 17.0.0 hf1b0f52_13_cpu + - __glibc >=2.17,<3.0.a0 + - libarrow 17.0.0 h364f349_20_cpu + - libarrow-acero 17.0.0 h5888daf_20_cpu + - libgcc >=13 + - libparquet 17.0.0 h6bd9018_20_cpu + - libstdcxx >=13 license: Apache-2.0 - license_family: APACHE purls: [] - size: 506575 - timestamp: 1725215307580 + size: 583751 + timestamp: 1728534883341 - kind: conda name: libarrow-dataset version: 17.0.0 - build: he0c23c2_13_cpu - build_number: 13 + build: hac47afa_20_cpu + build_number: 20 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-he0c23c2_13_cpu.conda - sha256: 12b0395dc22a2c3fb03e8b8ab32bcf4ff08947b8611b2a1e9c49644d8391893c - md5: dd78096e1335abc3c7bf6915d0ac7c34 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_20_cpu.conda + sha256: 49355faa6f9e4fb5ea6701af629c2ea1f0d61cbba8f04bd04ee6a28851871988 + md5: f164dad54f67d20dd189bd6a7067966d depends: - - libarrow 17.0.0 h29daf90_13_cpu - - libarrow-acero 17.0.0 he0c23c2_13_cpu - - libparquet 17.0.0 ha915800_13_cpu + - libarrow 17.0.0 h297d146_20_cpu + - libarrow-acero 17.0.0 hac47afa_20_cpu + - libparquet 17.0.0 h59f2d37_20_cpu - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 license: Apache-2.0 - license_family: APACHE purls: [] - size: 427535 - timestamp: 1725215469376 + size: 432396 + timestamp: 1728535944425 - kind: conda name: libarrow-substrait version: 17.0.0 - build: h1f0e801_13_cpu - build_number: 13 + build: ha9530af_20_cpu + build_number: 20 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-h1f0e801_13_cpu.conda - sha256: 637c2652cfe676d6949f7953de7d51e90bc35863c3a114c29795b5b0e119699c - md5: b618c36e7eff7a28a53bde4d9aa017e0 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_20_cpu.conda + sha256: 3e111be012bae997a2e65f5282b33cc2a6f9655cf0831a3db95261933c3010b4 + md5: 6b79fa44f56144ce6682d64ed1d64b7c depends: - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libarrow 17.0.0 h29daf90_13_cpu - - libarrow-acero 17.0.0 he0c23c2_13_cpu - - libarrow-dataset 17.0.0 he0c23c2_13_cpu - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libarrow 17.0.0 h297d146_20_cpu + - libarrow-acero 17.0.0 hac47afa_20_cpu + - libarrow-dataset 17.0.0 hac47afa_20_cpu + - libprotobuf >=5.27.5,<5.27.6.0a0 - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 license: Apache-2.0 - license_family: APACHE purls: [] - size: 382757 - timestamp: 1725215569161 + size: 365133 + timestamp: 1728536047685 - kind: conda name: libarrow-substrait version: 17.0.0 - build: hba007a9_13_cpu - build_number: 13 + build: hdefb866_20_cpu + build_number: 20 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hba007a9_13_cpu.conda - sha256: 729523ec54db45127b1e644454d3612ce48196c27426ae5c2ace022b6791bf53 - md5: 883ffa72318b7952df9a21243ab2f281 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_20_cpu.conda + sha256: 8e5370a0d28e1c69cf7d1169d786a30344d41f134b795e9d56665ff76c728153 + md5: e40c150c8d31081099f4e592028ff82d depends: - __osx >=10.13 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libarrow 17.0.0 ha60c65e_13_cpu - - libarrow-acero 17.0.0 hac325c4_13_cpu - - libarrow-dataset 17.0.0 hac325c4_13_cpu - - libcxx >=17 - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libabseil 
>=20240722.0,<20240723.0a0 + - libarrow 17.0.0 h74c0fbd_20_cpu + - libarrow-acero 17.0.0 h240833e_20_cpu + - libarrow-dataset 17.0.0 h240833e_20_cpu + - libcxx >=18 + - libprotobuf >=5.27.5,<5.27.6.0a0 license: Apache-2.0 - license_family: APACHE purls: [] - size: 478730 - timestamp: 1725215444041 + size: 459555 + timestamp: 1728535286463 - kind: conda name: libarrow-substrait version: 17.0.0 - build: hf54134d_13_cpu - build_number: 13 + build: he882d9a_20_cpu + build_number: 20 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda - sha256: 01ff52d5b866f3174018c81dee808fbef1101f2cff05cc5f29c80ff68cc8796c - md5: 46f41533959eee8826c09e55976b8c06 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_20_cpu.conda + sha256: 1fdd139fd70aaf309d0c05a7fc7712747634a61fa840319c05f27b7445c7c8bc + md5: faffcf59a3de9cc7bf0d8b0962ecf6eb depends: - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libarrow 17.0.0 h8d2e343_13_cpu - - libarrow-acero 17.0.0 h5888daf_13_cpu - - libarrow-dataset 17.0.0 h5888daf_13_cpu + - libabseil >=20240722.0,<20240723.0a0 + - libarrow 17.0.0 h364f349_20_cpu + - libarrow-acero 17.0.0 h5888daf_20_cpu + - libarrow-dataset 17.0.0 h5888daf_20_cpu - libgcc >=13 - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libprotobuf >=5.27.5,<5.27.6.0a0 - libstdcxx >=13 license: Apache-2.0 - license_family: APACHE purls: [] - size: 550883 - timestamp: 1725214851656 + size: 515897 + timestamp: 1728534924050 - kind: conda name: libblas version: 3.9.0 @@ -8608,46 +8638,46 @@ packages: - kind: conda name: libblas version: 3.9.0 - build: 23_linux64_openblas - build_number: 23 + build: 24_linux64_openblas + build_number: 24 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-23_linux64_openblas.conda - sha256: edb1cee5da3ac4936940052dcab6969673ba3874564f90f5110f8c11eed789c2 - md5: 
96c8450a40aa2b9733073a9460de972c + url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda + sha256: 3097f7913bda527d4fe9f824182b314e130044e582455037fca6f4e97965d83c + md5: 80aea6603a6813b16ec119d00382b772 depends: - libopenblas >=0.3.27,<0.3.28.0a0 - libopenblas >=0.3.27,<1.0a0 constrains: - - liblapacke 3.9.0 23_linux64_openblas - - libcblas 3.9.0 23_linux64_openblas - - liblapack 3.9.0 23_linux64_openblas - blas * openblas + - liblapack 3.9.0 24_linux64_openblas + - libcblas 3.9.0 24_linux64_openblas + - liblapacke 3.9.0 24_linux64_openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 14880 - timestamp: 1721688759937 + size: 14981 + timestamp: 1726668454790 - kind: conda name: libblas version: 3.9.0 - build: 23_win64_mkl - build_number: 23 + build: 24_win64_mkl + build_number: 24 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-23_win64_mkl.conda - sha256: fd52eb0ec4d0ca5727317dd608c41dacc8ccfc7e21d943b7aafbbf10ae28c97c - md5: 693407a31c27e70c750b5ae153251d9a + url: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-24_win64_mkl.conda + sha256: 8b4cd602ae089d8c5832054ead452d6a1820c8f9c3b190faf3e867f5939810e2 + md5: ea127210707251a33116b437c22b8dad depends: - mkl 2024.1.0 h66d3029_694 constrains: - blas * mkl - - liblapack 3.9.0 23_win64_mkl - - libcblas 3.9.0 23_win64_mkl - - liblapacke 3.9.0 23_win64_mkl + - liblapack 3.9.0 24_win64_mkl + - libcblas 3.9.0 24_win64_mkl + - liblapacke 3.9.0 24_win64_mkl license: BSD-3-Clause license_family: BSD purls: [] - size: 5192100 - timestamp: 1721689573083 + size: 5183540 + timestamp: 1726669397923 - kind: conda name: libbrotlicommon version: 1.1.0 @@ -8830,90 +8860,87 @@ packages: - kind: conda name: libcblas version: 3.9.0 - build: 23_linux64_openblas - build_number: 23 + build: 24_linux64_openblas + build_number: 24 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-23_linux64_openblas.conda 
- sha256: 3e7a3236e7e03e308e1667d91d0aa70edd0cba96b4b5563ef4adde088e0881a5 - md5: eede29b40efa878cbe5bdcb767e97310 + url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda + sha256: 2a52bccc5b03cdf014d856d0b85dbd591faa335ab337d620cd6aded121d7153c + md5: f5b8822297c9c790cec0795ca1fc9be6 depends: - - libblas 3.9.0 23_linux64_openblas + - libblas 3.9.0 24_linux64_openblas constrains: - - liblapacke 3.9.0 23_linux64_openblas - - liblapack 3.9.0 23_linux64_openblas - blas * openblas + - liblapack 3.9.0 24_linux64_openblas + - liblapacke 3.9.0 24_linux64_openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 14798 - timestamp: 1721688767584 + size: 14910 + timestamp: 1726668461033 - kind: conda name: libcblas version: 3.9.0 - build: 23_win64_mkl - build_number: 23 + build: 24_win64_mkl + build_number: 24 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-23_win64_mkl.conda - sha256: 80b471a22affadc322006399209e1d12eb4ab4e3125ed6d01b4031e09de16753 - md5: 7ffb5b336cefd2e6d1e00ac1f7c9f2c9 + url: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-24_win64_mkl.conda + sha256: 297e858e9a2e6c4d9846fc101607ad31b778d8bde8591f9207e72d728a9f00a7 + md5: a42c7390d3249698c0ffb6040e9396e7 depends: - - libblas 3.9.0 23_win64_mkl + - libblas 3.9.0 24_win64_mkl constrains: - blas * mkl - - liblapack 3.9.0 23_win64_mkl - - liblapacke 3.9.0 23_win64_mkl + - liblapack 3.9.0 24_win64_mkl + - liblapacke 3.9.0 24_win64_mkl license: BSD-3-Clause license_family: BSD purls: [] - size: 5191981 - timestamp: 1721689628480 + size: 5174668 + timestamp: 1726669449378 - kind: conda - name: libclang-cpp18.1 - version: 18.1.8 - build: default_hf981a13_4 - build_number: 4 + name: libclang-cpp19.1 + version: 19.1.1 + build: default_hb5137d0_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp18.1-18.1.8-default_hf981a13_4.conda - sha256: 
ec7ed3003f4b1507043f7a4ad85339c7a20898ff213e8f77f51f69c30d76780a - md5: 7b72d74b57e681251536094b96ba9c46 + url: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.1-default_hb5137d0_0.conda + sha256: a2fb20bdcbebf94d654a4e770ddc910b0e1fcefe2b5acbd5dec04cb19129df2c + md5: a5feadc4a296e2d31ab5a642498ff85e depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=12 - - libllvm18 >=18.1.8,<18.2.0a0 - - libstdcxx >=12 + - libgcc >=13 + - libllvm19 >=19.1.1,<19.2.0a0 + - libstdcxx >=13 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 19176386 - timestamp: 1725430019231 + size: 20542477 + timestamp: 1728456712882 - kind: conda name: libclang13 - version: 18.1.8 - build: default_h9def88c_4 - build_number: 4 + version: 19.1.1 + build: default_h9c6a7e4_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libclang13-18.1.8-default_h9def88c_4.conda - sha256: 606c82d902a6d343b1b21967d30d73f6d54b8340fe180f2b0641fb775fba91e9 - md5: 7e3f831d4ae9820999418821be65ff67 + url: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.1-default_h9c6a7e4_0.conda + sha256: fa782c361fd77574cdd3e99762e82b8f02bc8b7da9098e8e5d5b925a153840fe + md5: 2e8992c584c2525a5b8ec7485cbe360c depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=12 - - libllvm18 >=18.1.8,<18.2.0a0 - - libstdcxx >=12 + - libgcc >=13 + - libllvm19 >=19.1.1,<19.2.0a0 + - libstdcxx >=13 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 11017079 - timestamp: 1725430212320 + size: 11820007 + timestamp: 1728456910135 - kind: conda name: libclang13 - version: 18.1.8 - build: default_ha5278ca_4 - build_number: 4 + version: 19.1.1 + build: default_ha5278ca_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libclang13-18.1.8-default_ha5278ca_4.conda - sha256: be74316898c456b0a19fcbbe73f94f6a9459d444317e932a0636882603edae3e - md5: e9d701da6db17a9638be8dc5569b0327 + url: 
https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.1-default_ha5278ca_0.conda + sha256: 107be8d6156bc3395e2576dbbfddb7a6b72c9efbbe083a6d3f86611110f6e4cf + md5: 72f980e3852ad8f490485868bd391851 depends: - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 @@ -8923,8 +8950,8 @@ packages: license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 25317731 - timestamp: 1725434281988 + size: 26750479 + timestamp: 1728476423850 - kind: conda name: libcrc32c version: 1.1.2 @@ -8993,12 +9020,12 @@ packages: timestamp: 1689195353551 - kind: conda name: libcurl - version: 8.10.0 + version: 8.10.1 build: h1ee3ff0_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.0-h1ee3ff0_0.conda - sha256: 2209a888bb1cdd82a359927efb6a949b8359d8e67edea2fcc29a32ce17214871 - md5: e5b4f3d5768b72716f05513d6fa02ba9 + url: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.1-h1ee3ff0_0.conda + sha256: dfbac497c4fee74f67391f9c4a40cab559468b7d04ff9fad4b404a26b5e1d5b8 + md5: 7ead800e22ff7b4bccb73e42a8f7a0f4 depends: - krb5 >=1.21.3,<1.22.0a0 - libssh2 >=1.11.0,<2.0a0 @@ -9009,16 +9036,16 @@ packages: license: curl license_family: MIT purls: [] - size: 342210 - timestamp: 1726064608464 + size: 342388 + timestamp: 1726660508261 - kind: conda name: libcurl - version: 8.10.0 + version: 8.10.1 build: h58e7537_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.0-h58e7537_0.conda - sha256: 6b28ed898562ee9e351bbb209fea25c9cd4078f2010223f23dbccc3be0c3d361 - md5: 732abd8f88ee1749239335c2328e5fc3 + url: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.1-h58e7537_0.conda + sha256: 662fe145459ed58dee882e525588d1da4dcc4cbd10cfca0725d1fc3840461798 + md5: 6c8669d8228a2bbd0283911cc6d6726e depends: - __osx >=10.13 - krb5 >=1.21.3,<1.22.0a0 @@ -9030,16 +9057,16 @@ packages: license: curl license_family: MIT purls: [] - size: 402216 - timestamp: 1726064094965 + size: 402588 + timestamp: 1726660264675 - kind: 
conda name: libcurl - version: 8.10.0 + version: 8.10.1 build: hbbe4b11_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.0-hbbe4b11_0.conda - sha256: 7d9e0b7d855b9f0a3083190fb9931d6afb9c669009011bcb35cc3688d992a51a - md5: 657ea309ad90675ef144e7d27a271ab9 + url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda + sha256: 54e6114dfce566c3a22ad3b7b309657e3600cdb668398e95f1301360d5d52c99 + md5: 6e801c50a40301f6978c53976917b277 depends: - __glibc >=2.17,<3.0.a0 - krb5 >=1.21.3,<1.22.0a0 @@ -9052,32 +9079,46 @@ packages: license: curl license_family: MIT purls: [] - size: 425003 - timestamp: 1726063912465 + size: 424900 + timestamp: 1726659794676 - kind: conda name: libcxx - version: 18.1.8 - build: hd876a4e_7 - build_number: 7 + version: 19.1.1 + build: hf95d169_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_7.conda - sha256: ca43fcc18bff98cbf456ccc76fe113b2afe01d4156c2899b638fd1bc0323d239 - md5: c346ae5c96382a12563e3b0c403c8c4a + url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.1-hf95d169_0.conda + sha256: 390ee50a14fe5b6ac87b64eeb0130c7a79853641ae9a8926687556c76a645889 + md5: 2b09d0f92cae6df4b1670adcaca9c38c depends: - __osx >=10.13 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 439306 - timestamp: 1725403678987 + size: 528308 + timestamp: 1727863581528 +- kind: conda + name: libdeflate + version: '1.22' + build: h00291cd_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.22-h00291cd_0.conda + sha256: 681035346974c3315685dc40898e26f65f1c00cbb0b5fd80cc2599e207a34b31 + md5: a15785ccc62ae2a8febd299424081efb + depends: + - __osx >=10.13 + license: MIT + license_family: MIT + purls: [] + size: 70407 + timestamp: 1728177128525 - kind: conda name: libdeflate - version: '1.21' + version: '1.22' build: h2466b09_0 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.21-h2466b09_0.conda - sha256: ebb21b910164d97dc23be83ba29a8004b9bba7536dc850c6d8b00bbb84259e78 - md5: 4ebe2206ebf4bf38f6084ad836110361 + url: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.22-h2466b09_0.conda + sha256: 579c634b7de8869cb1d76eccd4c032dc275d5a017212128502ea4dc828a5b361 + md5: a3439ce12d4e3cd887270d9436f9a4c8 depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -9085,39 +9126,24 @@ packages: license: MIT license_family: MIT purls: [] - size: 155801 - timestamp: 1722820571739 + size: 155506 + timestamp: 1728177485361 - kind: conda name: libdeflate - version: '1.21' - build: h4bc722e_0 + version: '1.22' + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda - sha256: 728c24ce835700bfdfdf106bf04233fdb040a61ca4ecfd3f41b46fa90cd4f971 - md5: 36ce76665bf67f5aac36be7a0d21b7f3 + url: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda + sha256: 780f0530a3adfc1497ba49d626931c6afc978c540e1abfde6ccd57128ded6ad6 + md5: b422943d5d772b7cc858b36ad2a92db5 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - license: MIT - license_family: MIT - purls: [] - size: 71163 - timestamp: 1722820138782 -- kind: conda - name: libdeflate - version: '1.21' - build: hfdf4475_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.21-hfdf4475_0.conda - sha256: 1defb3e5243a74a9ef64de2a47812f524664e46ca9dbecb8d7c746cb1779038e - md5: 88409b23a5585c15d52de0073f3c9c61 - depends: - - __osx >=10.13 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 70570 - timestamp: 1722820232914 + size: 72242 + timestamp: 1728177071251 - kind: conda name: libdrm version: 2.4.123 @@ -9171,18 +9197,19 @@ packages: - kind: conda name: libegl version: 1.7.0 - build: ha4b6fd6_0 + build: ha4b6fd6_1 + build_number: 1 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_0.conda - sha256: d577ab061760e631c2980eb88d6970e43391c461a89fc7cd6f98e2999d626d44 - md5: 35e52d19547cb3265a09c49de146a5ae + url: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_1.conda + sha256: e64388e983cf14354b70fe908ca3943f2481ea63df8a4de5e4d418dc2addd38e + md5: 38a5cd3be5fb620b48069e27285f1a44 depends: - __glibc >=2.17,<3.0.a0 - - libglvnd 1.7.0 ha4b6fd6_0 + - libglvnd 1.7.0 ha4b6fd6_1 license: LicenseRef-libglvnd purls: [] - size: 44492 - timestamp: 1723473193819 + size: 44620 + timestamp: 1727968589748 - kind: conda name: libev version: '4.33' @@ -9366,6 +9393,27 @@ packages: purls: [] size: 42063 timestamp: 1636489106777 +- kind: conda + name: libgcc + version: 14.1.0 + build: h1383e82_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.1.0-h1383e82_1.conda + sha256: 727d3659035d7b3c6c07c2cf90e7886ae81fd03229abf3ec9f836d9aeca11d2a + md5: 5464b6bb50d593b8f529d1fbcd58f3b2 + depends: + - _openmp_mutex >=4.5 + - libwinpthread >=12.0.0.r4.gg4f2fc60ca + constrains: + - msys2-conda-epoch <0.0a0 + - libgomp 14.1.0 h1383e82_1 + - libgcc-ng ==14.1.0=*_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 665353 + timestamp: 1724805164393 - kind: conda name: libgcc version: 14.1.0 @@ -9405,12 +9453,12 @@ packages: - kind: conda name: libgdal version: 3.9.2 - build: h57928b3_2 - build_number: 2 + build: h57928b3_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_2.conda - sha256: d4f9528a8e256b84e3d0a901a9b77d8a0fa654adda07e270e7fae0164652305e - md5: a38e3c87e1ce87145464716aec93fefc + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_7.conda + sha256: 84c190f70ff07a75cb9dd42e8a71e5f44846ce423cf68846f7000ead89ab19aa + md5: 08b7dfed6465467d50389c323e7b2a15 depends: - libgdal-core 3.9.2.* - libgdal-fits 3.9.2.* @@ 
-9426,18 +9474,19 @@ packages: - libgdal-tiledb 3.9.2.* - libgdal-xls 3.9.2.* license: MIT + license_family: MIT purls: [] - size: 423328 - timestamp: 1726098743251 + size: 423224 + timestamp: 1728298647854 - kind: conda name: libgdal version: 3.9.2 - build: h694c41f_2 - build_number: 2 + build: h694c41f_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_2.conda - sha256: 13a3b2dcf7ce090fe778a736cc7bc1034b0609ed6e19b91291b1958767978d64 - md5: abb256d462df471d514b7535eeb211a0 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-3.9.2-h694c41f_7.conda + sha256: 91ee388f2dbe1a01d94c45fa349214450416bad66350bee320c140cd2696f2ca + md5: da0c08bcb15d530ea6aa628f24c8fc72 depends: - libgdal-core 3.9.2.* - libgdal-fits 3.9.2.* @@ -9453,18 +9502,19 @@ packages: - libgdal-tiledb 3.9.2.* - libgdal-xls 3.9.2.* license: MIT + license_family: MIT purls: [] - size: 422986 - timestamp: 1726095491845 + size: 422980 + timestamp: 1728296294423 - kind: conda name: libgdal version: 3.9.2 - build: ha770c72_2 - build_number: 2 + build: ha770c72_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_2.conda - sha256: 951075e8d6b5f57eee5f9ff8ea02f2e416b32fde2de85a455639960bec9fcc94 - md5: 5ce32492677df51aca4b755bb4f05835 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda + sha256: 33ae5aed64c19e3e7e50f0d1bbbd7abfe814687b2a350444c4b2867f81fca9b4 + md5: 63779711c7afd4fcf9cea67538baa67a depends: - libgdal-core 3.9.2.* - libgdal-fits 3.9.2.* @@ -9480,128 +9530,131 @@ packages: - libgdal-tiledb 3.9.2.* - libgdal-xls 3.9.2.* license: MIT + license_family: MIT purls: [] - size: 422566 - timestamp: 1726093634489 + size: 422887 + timestamp: 1728295073003 - kind: conda name: libgdal-core version: 3.9.2 - build: h26ecb72_2 - build_number: 2 - subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-h26ecb72_2.conda - sha256: d7d0d7f15409286a3ab10bd8740189c659899433b33fdbcad4d62d317dbee908 - md5: 019cc81c6b62de83b4b3e7cd3487fd5d + build: h042995d_7 + build_number: 7 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h042995d_7.conda + sha256: 6e8283f660f1528e7f9f08fb60471fad259722b26a97f010d1f5d5ea5022d53a + md5: cb3a804c9039d13e7f3b93d17f6fa0de depends: - - __osx >=10.13 - blosc >=1.21.6,<2.0a0 - - geos >=3.12.2,<3.12.3.0a0 + - geos >=3.13.0,<3.13.1.0a0 - geotiff >=1.7.3,<1.8.0a0 - - giflib >=5.2.2,<5.3.0a0 - - json-c >=0.17,<0.18.0a0 - lerc >=4.0.0,<5.0a0 - libarchive >=3.7.4,<3.8.0a0 - - libcurl >=8.10.0,<9.0a0 - - libcxx >=17 - - libdeflate >=1.21,<1.22.0a0 + - libcurl >=8.10.1,<9.0a0 + - libdeflate >=1.22,<1.23.0a0 - libexpat >=2.6.3,<3.0a0 - libiconv >=1.17,<2.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libkml >=1.3.0,<1.4.0a0 - - libpng >=1.6.43,<1.7.0a0 + - libpng >=1.6.44,<1.7.0a0 - libspatialite >=5.1.0,<5.2.0a0 - libsqlite >=3.46.1,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.7.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 - xerces-c >=3.2.5,<3.3.0a0 - xz >=5.2.6,<6.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - libgdal 3.9.2.* license: MIT + license_family: MIT purls: [] - size: 8978330 - timestamp: 1726092675670 + size: 8026134 + timestamp: 1728294105231 - kind: conda name: libgdal-core version: 3.9.2 - build: h2fd8da2_2 - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h2fd8da2_2.conda - sha256: 01aadb30339abdfe9507319d92384a3fe6acbebe7365a8f51ddff430faa42993 - md5: 953d365d72cc02dae51d7abb28bba3d1 + build: hba79287_7 + build_number: 7 + subdir: 
osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-core-3.9.2-hba79287_7.conda + sha256: b2f0109e55644e1f9f9ef320cdda05ff36cf40ca60e3cede4b922a79f7143024 + md5: 6db92ed40e16f879cca6783d008e249a depends: + - __osx >=10.13 - blosc >=1.21.6,<2.0a0 - - geos >=3.12.2,<3.12.3.0a0 + - geos >=3.13.0,<3.13.1.0a0 - geotiff >=1.7.3,<1.8.0a0 + - giflib >=5.2.2,<5.3.0a0 + - json-c >=0.18,<0.19.0a0 - lerc >=4.0.0,<5.0a0 - libarchive >=3.7.4,<3.8.0a0 - - libcurl >=8.10.0,<9.0a0 - - libdeflate >=1.21,<1.22.0a0 + - libcurl >=8.10.1,<9.0a0 + - libcxx >=17 + - libdeflate >=1.22,<1.23.0a0 - libexpat >=2.6.3,<3.0a0 - libiconv >=1.17,<2.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libkml >=1.3.0,<1.4.0a0 - - libpng >=1.6.43,<1.7.0a0 + - libpng >=1.6.44,<1.7.0a0 - libspatialite >=5.1.0,<5.2.0a0 - libsqlite >=3.46.1,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.7.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - - proj >=9.4.1,<9.5.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - proj >=9.5.0,<9.6.0a0 - xerces-c >=3.2.5,<3.3.0a0 - xz >=5.2.6,<6.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - libgdal 3.9.2.* license: MIT + license_family: MIT purls: [] - size: 8050659 - timestamp: 1726093888991 + size: 9006869 + timestamp: 1728293116638 - kind: conda name: libgdal-core version: 3.9.2 - build: hbd1db40_2 - build_number: 2 + build: hd5b9bfb_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hbd1db40_2.conda - sha256: 8bd783a169fec4c427149d5afbe46094ee3591792213097cbe92f563d289e39d - md5: c4bf6b60486027bac8e8845decc98b93 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hd5b9bfb_7.conda + sha256: afff658dece6c8f4dbff2fc459bc834f8491e7ed1a491397e23280cf0917aa19 + md5: a23eb349d023a8543752566be00b6d88 depends: - __glibc >=2.17,<3.0.a0 - blosc 
>=1.21.6,<2.0a0 - - geos >=3.12.2,<3.12.3.0a0 + - geos >=3.13.0,<3.13.1.0a0 - geotiff >=1.7.3,<1.8.0a0 - giflib >=5.2.2,<5.3.0a0 - - json-c >=0.17,<0.18.0a0 + - json-c >=0.18,<0.19.0a0 - lerc >=4.0.0,<5.0a0 - libarchive >=3.7.4,<3.8.0a0 - - libcurl >=8.10.0,<9.0a0 - - libdeflate >=1.21,<1.22.0a0 + - libcurl >=8.10.1,<9.0a0 + - libdeflate >=1.22,<1.23.0a0 - libexpat >=2.6.3,<3.0a0 - libgcc >=13 - libiconv >=1.17,<2.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libkml >=1.3.0,<1.4.0a0 - - libpng >=1.6.43,<1.7.0a0 + - libpng >=1.6.44,<1.7.0a0 - libspatialite >=5.1.0,<5.2.0a0 - libsqlite >=3.46.1,<4.0a0 - libstdcxx >=13 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.7.0,<4.8.0a0 - libuuid >=2.38.1,<3.0a0 - libwebp-base >=1.4.0,<2.0a0 - libxml2 >=2.12.7,<3.0a0 @@ -9609,25 +9662,26 @@ packages: - lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 - xerces-c >=3.2.5,<3.3.0a0 - xz >=5.2.6,<6.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - libgdal 3.9.2.* license: MIT + license_family: MIT purls: [] - size: 10460443 - timestamp: 1726092341723 + size: 10419110 + timestamp: 1728293224908 - kind: conda name: libgdal-fits version: 3.9.2 - build: h0a0b71e_2 - build_number: 2 + build: h0a0b71e_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_2.conda - sha256: f0c04f460af19b62532984b277c26bbc9aa90cdf1fc8d608ced90c9d4cbb86c3 - md5: 27d1bce97d2e6c584d48b223d6a890e1 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_7.conda + sha256: 2c5b68e63880919d8f7c17b9928db2be118153506c99a70b74b19b5e163a0bbb + md5: d24f52a1eeec99436f3e7b548ae4d048 depends: - cfitsio >=4.4.1,<4.4.2.0a0 - libgdal-core >=3.9 @@ -9636,18 +9690,19 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 496865 - timestamp: 1726096554431 + size: 497868 + timestamp: 1728296632048 - kind: conda name: 
libgdal-fits version: 3.9.2 - build: h2000d26_2 - build_number: 2 + build: h2000d26_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_2.conda - sha256: 99e50299f4fc4bd56004d46bc687e2911951af1eb1d789a2575f41ecc27cf466 - md5: 9c3aba4aca7b18a4bf164e140150c257 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-fits-3.9.2-h2000d26_7.conda + sha256: 8a288aed5865b96539e558cfbc5929b89ee8102cc373ed915bd834cace7b28c2 + md5: facfdaad8b0cf3e0353a039903a3c44e depends: - __osx >=10.13 - cfitsio >=4.4.1,<4.4.2.0a0 @@ -9655,18 +9710,19 @@ packages: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT + license_family: MIT purls: [] - size: 469488 - timestamp: 1726094128863 + size: 469584 + timestamp: 1728294744612 - kind: conda name: libgdal-fits version: 3.9.2 - build: h2db6552_2 - build_number: 2 + build: h2db6552_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_2.conda - sha256: 066ced830899567ba036cd65d332d5bf2bd356a0fc641b056ff72157259e9645 - md5: 6587f4912f1ad57f88221edb03346951 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda + sha256: 156ae6b968301cc0ce51c96b60df594569a6df0caab0ac936d2532d09619e2fc + md5: 524e64f1aa0ebc87230109e684f392f4 depends: - __glibc >=2.17,<3.0.a0 - cfitsio >=4.4.1,<4.4.2.0a0 @@ -9675,18 +9731,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - libstdcxx >=13 license: MIT + license_family: MIT purls: [] - size: 477927 - timestamp: 1726093088453 + size: 478024 + timestamp: 1728294286914 - kind: conda name: libgdal-grib version: 3.9.2 - build: h9237131_2 - build_number: 2 + build: h9237131_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_2.conda - sha256: 8bf83845c8f7c6114fb57e3d63d73e66f4e1457997e7b2a804c66170764e45b9 - md5: 00555b58b0bcca46e7b9e1459be4ccf2 + url: 
https://conda.anaconda.org/conda-forge/osx-64/libgdal-grib-3.9.2-h9237131_7.conda + sha256: f4c0f6852b833c565a3ded566a53a2cb3e0de2bdd17b51c4ccf015bf02bfa723 + md5: f758eb2356a21a5c0eb2b433879f8671 depends: - __osx >=10.13 - libaec >=1.1.3,<2.0a0 @@ -9694,18 +9751,19 @@ packages: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT + license_family: MIT purls: [] - size: 663497 - timestamp: 1726094251462 + size: 663034 + timestamp: 1728294875010 - kind: conda name: libgdal-grib version: 3.9.2 - build: hc3b29a1_2 - build_number: 2 + build: hc3b29a1_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_2.conda - sha256: 05e8868e6686eb862d230c36ab6fa333c8862ebb2c86e1f3f0fa47a37ba31bc9 - md5: d010706c1424156e5cece58f1d52f085 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_7.conda + sha256: 54937f8f0b85b941321324f350a9e1895b772153b70be64539689466899dd9b1 + md5: 56a7436a66a1a4636001ce4b621a3a33 depends: - __glibc >=2.17,<3.0.a0 - libaec >=1.1.3,<2.0a0 @@ -9714,18 +9772,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - libstdcxx >=13 license: MIT + license_family: MIT purls: [] - size: 723537 - timestamp: 1726093136248 + size: 722657 + timestamp: 1728294356817 - kind: conda name: libgdal-grib version: 3.9.2 - build: hd2a089b_2 - build_number: 2 + build: hd2a089b_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_2.conda - sha256: 78f25abd0e9917d6a17a621c66536933c1c973b3665c5b11be6ea810ce7ca78d - md5: d0dd12f70a21506eb402cd9ec696fb20 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-grib-3.9.2-hd2a089b_7.conda + sha256: 695af1fb1f52beeb2b0d3f0ea922c00c5373fbbdc300a0b227ccfc258c40c086 + md5: dd3572ebb1832021bb0bd1dc06b56043 depends: - libaec >=1.1.3,<2.0a0 - libgdal-core >=3.9 @@ -9734,18 +9793,19 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - 
size: 678203 - timestamp: 1726096748590 + size: 678215 + timestamp: 1728296811437 - kind: conda name: libgdal-hdf4 version: 3.9.2 - build: h430f241_2 - build_number: 2 + build: h430f241_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_2.conda - sha256: 3ed2f67582489316304398bd40fb85976c4ce426bbee363cb38baff637c4f335 - md5: 1037903f70eb07936a09418488e8f226 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf4-3.9.2-h430f241_7.conda + sha256: 8a0f9532574fd7cdb26a3b79c505c26dd04317bd742fd9587ee508e773c527ab + md5: 720ff75366c6e31259ccd53b73b27abf depends: - hdf4 >=4.2.15,<4.2.16.0a0 - libaec >=1.1.3,<2.0a0 @@ -9755,18 +9815,19 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 562233 - timestamp: 1726096927503 + size: 562915 + timestamp: 1728296979645 - kind: conda name: libgdal-hdf4 version: 3.9.2 - build: hbfba102_2 - build_number: 2 + build: hbfba102_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_2.conda - sha256: c38e3bc65dc35de96c9f8cfee7ae6606f72214714c53db4581f7f5baf2516007 - md5: 2d081b1f5acdcd3dcf7ed52c1d775a6a + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf4-3.9.2-hbfba102_7.conda + sha256: b6209c24e8e188d740453315e76ef827dfa4791e2545d0dab0026a72859059c9 + md5: 5bd4c1be3bda86369e3f2ef369c949da depends: - __osx >=10.13 - hdf4 >=4.2.15,<4.2.16.0a0 @@ -9775,18 +9836,19 @@ packages: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT + license_family: MIT purls: [] - size: 591694 - timestamp: 1726094364531 + size: 591557 + timestamp: 1728294993626 - kind: conda name: libgdal-hdf4 version: 3.9.2 - build: hd5ecb85_2 - build_number: 2 + build: hd5ecb85_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_2.conda - sha256: 
7404a1108ff5903bf6b3639948114e017826f875aa7ba9c2604587da846bd656 - md5: cda7d2aa5a64fad327f7878cf5de6a00 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_7.conda + sha256: 0b8b77e609b72a51e9548e63c4423222515cd833ab5321eb7f283cf250bb00b5 + md5: 9c8431dc0b83d5fe9c12a2c0b6861a72 depends: - __glibc >=2.17,<3.0.a0 - hdf4 >=4.2.15,<4.2.16.0a0 @@ -9796,18 +9858,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - libstdcxx >=13 license: MIT + license_family: MIT purls: [] - size: 578505 - timestamp: 1726093181011 + size: 579075 + timestamp: 1728294420920 - kind: conda name: libgdal-hdf5 version: 3.9.2 - build: h6283f77_2 - build_number: 2 + build: h6283f77_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_2.conda - sha256: f87faa9e8133bf89c53ddc1a8f5abadfb41bea3d9d4aa6ed8563a226968d2e68 - md5: 0e6a02e8fa69e395d90eeeb93fb6a325 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_7.conda + sha256: 0998f51e51086a56871538c803eb4e87eb404862a38ab0109e1dc78705491db2 + md5: c8c82df3aece4e23804d178a8a8b308a depends: - __glibc >=2.17,<3.0.a0 - hdf5 >=1.14.3,<1.14.4.0a0 @@ -9816,18 +9879,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - libstdcxx >=13 license: MIT + license_family: MIT purls: [] - size: 643055 - timestamp: 1726093235520 + size: 643208 + timestamp: 1728294499558 - kind: conda name: libgdal-hdf5 version: 3.9.2 - build: had131a1_2 - build_number: 2 + build: had131a1_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_2.conda - sha256: de844b6400029027fc3826c1967e0381d06d95e76f88261b3a89d9562f0a827a - md5: 06bee2b910714c12fa510f0cf0aeb2e3 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-hdf5-3.9.2-had131a1_7.conda + sha256: f64a98ee37ed7eb3fbdd6de402190403b4b28d893d50b08bd92262e847effae4 + md5: 7e62763a83b3730f5058daefb4962053 depends: - hdf5 >=1.14.3,<1.14.4.0a0 - libgdal-core 
>=3.9 @@ -9836,18 +9900,19 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 613268 - timestamp: 1726097135142 + size: 613800 + timestamp: 1728297167598 - kind: conda name: libgdal-hdf5 version: 3.9.2 - build: hc0c3446_2 - build_number: 2 + build: hc0c3446_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_2.conda - sha256: 7ac2b60f99639a662234d39403c9ff3360b6f2ac85f909b25ca86d10ff44b244 - md5: ab9f93b55a1d47ac6d87d9e00f836633 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-hdf5-3.9.2-hc0c3446_7.conda + sha256: 2915ec7d368142bd733ff6f9277a303ab144cfc9498d25d4a75b3d06a59e2673 + md5: 454b0bf1e0772ac9087a6d4ae46371cb depends: - __osx >=10.13 - hdf5 >=1.14.3,<1.14.4.0a0 @@ -9855,18 +9920,19 @@ packages: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT + license_family: MIT purls: [] - size: 601423 - timestamp: 1726094496440 + size: 601534 + timestamp: 1728295128931 - kind: conda name: libgdal-jp2openjpeg version: 3.9.2 - build: h1b2c38e_2 - build_number: 2 + build: h1b2c38e_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_2.conda - sha256: c01d32450ce2d2144e4e9070eab08abae1046dd7d8a8576de7e490bffd4668e1 - md5: d5c9b358642090b82d9869e9fc846bc0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_7.conda + sha256: 27068921e22565c71cca415211f0185154db3f1d070680790f5c3cc59bb376c7 + md5: f0f86f8cb8835bb91acb8c7fa2c350b0 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -9875,18 +9941,19 @@ packages: - libstdcxx >=13 - openjpeg >=2.5.2,<3.0a0 license: MIT + license_family: MIT purls: [] - size: 469108 - timestamp: 1726093273314 + size: 469287 + timestamp: 1728294554655 - kind: conda name: libgdal-jp2openjpeg version: 3.9.2 - build: hd77bb1f_2 - build_number: 2 + build: hd77bb1f_7 + build_number: 7 subdir: osx-64 - 
url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_2.conda - sha256: 10102b96411dfea446deb6235ecea536d34b59b81cad311648b87d4249c3dc08 - md5: 45031c24274b3035b4877732e192f392 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-jp2openjpeg-3.9.2-hd77bb1f_7.conda + sha256: 996843df733b8ef9164c88470eaaf9eecba93421e685f1a257730e52dfebb5b5 + md5: f94685cf5ac1e222b0179cd84b414f9f depends: - __osx >=10.13 - libcxx >=17 @@ -9894,18 +9961,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - openjpeg >=2.5.2,<3.0a0 license: MIT + license_family: MIT purls: [] - size: 464295 - timestamp: 1726094606697 + size: 464488 + timestamp: 1728295242884 - kind: conda name: libgdal-jp2openjpeg version: 3.9.2 - build: hed4c6cb_2 - build_number: 2 + build: hed4c6cb_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_2.conda - sha256: 5f980b14f1422f721c9142bdb0b33bfb797fd5b57b094056dd0258ce341bed15 - md5: 0df660649e46d4a71bf229f1f413b847 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-jp2openjpeg-3.9.2-hed4c6cb_7.conda + sha256: 88f40b96af8df6d164ead2a138fc3ead313821f127cfcaadf60059d07f54c62a + md5: 4135def2658a84e8739677a39eadd200 depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 @@ -9914,18 +9982,19 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 497665 - timestamp: 1726097310878 + size: 498132 + timestamp: 1728297319442 - kind: conda name: libgdal-kea version: 3.9.2 - build: h1df15e4_2 - build_number: 2 + build: h1df15e4_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_2.conda - sha256: 213b177c42b0ef5328e5fd4dbe27ed34765e49bb3f4eef38c1c43a9e5a7c9932 - md5: d9bfebd28be759b818d401a201357b87 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_7.conda + sha256: 
252d6f9cc3bb2fa2788e73ce5c8a4587f653f9b1f1dc34ecc022ef4aa1b53bf0 + md5: c693e703649051ee9db0fabd4fcd0483 depends: - __glibc >=2.17,<3.0.a0 - hdf5 >=1.14.3,<1.14.4.0a0 @@ -9936,18 +10005,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - libstdcxx >=13 license: MIT + license_family: MIT purls: [] - size: 480381 - timestamp: 1726093574319 + size: 480532 + timestamp: 1728294987957 - kind: conda name: libgdal-kea version: 3.9.2 - build: h95b1a77_2 - build_number: 2 + build: h95b1a77_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_2.conda - sha256: b1a5273b69e75769b7c8eec1caf4420b14b5bbe3d7f0f3ebd26bbbcf63407f8f - md5: 2f388a9b3f0d563f070cd86d3abac645 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-kea-3.9.2-h95b1a77_7.conda + sha256: eb0c503103eaf5821564ab47d0dfcc23481037115cb988d7af3c7963fadbb9c4 + md5: a20cfdcc279d52808b1a53296bfd308c depends: - hdf5 >=1.14.3,<1.14.4.0a0 - kealib >=1.5.3,<1.6.0a0 @@ -9958,18 +10028,19 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 518345 - timestamp: 1726098538348 + size: 518891 + timestamp: 1728298457098 - kind: conda name: libgdal-kea version: 3.9.2 - build: he223473_2 - build_number: 2 + build: he223473_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_2.conda - sha256: 8687adb1ab399a08f9901dfd6ba22cf38fac91b3b56cafc4efbd846cfaaacc87 - md5: 4f4b68b06d7e4b3fcf5a5999bddb1298 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-kea-3.9.2-he223473_7.conda + sha256: 49967afed2cd4aca663546aee988041da402dca3d45a6af296bbe488ba98b7ce + md5: 9766fd199749092d67491d52d1859a29 depends: - __osx >=10.13 - hdf5 >=1.14.3,<1.14.4.0a0 @@ -9979,18 +10050,19 @@ packages: - libgdal-hdf5 3.9.2.* - libkml >=1.3.0,<1.4.0a0 license: MIT + license_family: MIT purls: [] - size: 475299 - timestamp: 1726095352836 + size: 475267 + timestamp: 1728296100612 
- kind: conda name: libgdal-netcdf version: 3.9.2 - build: h55e78d3_2 - build_number: 2 + build: h55e78d3_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_2.conda - sha256: 867372f026665f84ada56bd4b8baa649c2e4060d037ab114d7acfcd6b4d6a766 - md5: a020d9b9f0a667981f2cdc9929e85ec0 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-netcdf-3.9.2-h55e78d3_7.conda + sha256: 0e74029976a25c4088d7aa5967f0cd81e7963aae4633fd01bae9a63df5ad799e + md5: 55406e979f268a570952c49fe941eb04 depends: - hdf4 >=4.2.15,<4.2.16.0a0 - hdf5 >=1.14.3,<1.14.4.0a0 @@ -10003,18 +10075,19 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 666378 - timestamp: 1726098736520 + size: 667045 + timestamp: 1728298642403 - kind: conda name: libgdal-netcdf version: 3.9.2 - build: he83ae23_2 - build_number: 2 + build: he83ae23_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_2.conda - sha256: ae605bc31e3b9503b1e6ae733530fdd1cc21721bc6fead69c3dd2066e6202db1 - md5: 049cd27768fd0735bde2237c2f436e88 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-netcdf-3.9.2-he83ae23_7.conda + sha256: 0cfb0a6d6b50def0b5d05603a6e81ac0813791cba56343ce37a1a98f2b3d6d6d + md5: 7003ce33614d0435766fd7e58f4170ea depends: - __osx >=10.13 - hdf4 >=4.2.15,<4.2.16.0a0 @@ -10026,18 +10099,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - libnetcdf >=4.9.2,<4.9.3.0a0 license: MIT + license_family: MIT purls: [] - size: 692336 - timestamp: 1726095485705 + size: 692175 + timestamp: 1728296286589 - kind: conda name: libgdal-netcdf version: 3.9.2 - build: hf2d2f32_2 - build_number: 2 + build: hf2d2f32_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_2.conda - sha256: d7b5a4b57c6231e452c058e1b7921b904a47f6b11fb3d7db3eb3329d7d9fa34d - md5: 
23adb723f7ee5d05b2925a6562cdfc35 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_7.conda + sha256: 58155b0df43b090ed55341c9b24e07047db9b4bd8889309a02180e99c4e69558 + md5: 4015ef020928219acc0b5c9edbce8d30 depends: - __glibc >=2.17,<3.0.a0 - hdf4 >=4.2.15,<4.2.16.0a0 @@ -10050,18 +10124,19 @@ packages: - libnetcdf >=4.9.2,<4.9.3.0a0 - libstdcxx >=13 license: MIT + license_family: MIT purls: [] - size: 737551 - timestamp: 1726093631219 + size: 738251 + timestamp: 1728295070799 - kind: conda name: libgdal-pdf version: 3.9.2 - build: h600f43f_2 - build_number: 2 + build: h600f43f_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_2.conda - sha256: d66d7c3e3758c60df4ac4eb2096085f77c6629303b6f869305249db01e2b204b - md5: f55a803c395491ed48baf6d1fd464eb3 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_7.conda + sha256: 10ebe0047d4300152185c095a74a3159fcc3b3d2b0e0bb111381dc7d018cbf65 + md5: 567066db0820f4983a6741e429c651d1 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -10070,18 +10145,19 @@ packages: - libstdcxx >=13 - poppler license: MIT + license_family: MIT purls: [] - size: 667813 - timestamp: 1726093333981 + size: 668085 + timestamp: 1728294642230 - kind: conda name: libgdal-pdf version: 3.9.2 - build: h85e1e31_2 - build_number: 2 + build: h85e1e31_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_2.conda - sha256: c9e9a11af7fe7dc2eb306300d7972e2d03e5d3abc0945407deb93026d1749c91 - md5: 95b05a267dc00e4f5d3efc2cb56feea7 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pdf-3.9.2-h85e1e31_7.conda + sha256: 9f8ebb3bcf2aa6e1dd10ecba400a22e5c9fbc4d958f3de890ed47ccde8206e0a + md5: a2074ffb131313c13bd775e2ae884ed8 depends: - __osx >=10.13 - libcxx >=17 @@ -10089,18 +10165,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - poppler license: MIT + license_family: MIT 
purls: [] - size: 610106 - timestamp: 1726094743209 + size: 610237 + timestamp: 1728295393431 - kind: conda name: libgdal-pdf version: 3.9.2 - build: ha1c78db_2 - build_number: 2 + build: ha1c78db_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_2.conda - sha256: ae9c94c3129296194f005d0d29f63ec72ddb52cf18ed173fc27b43d50f3ca4d1 - md5: 7546b2895c42bbeb90b59e80dcf28419 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pdf-3.9.2-ha1c78db_7.conda + sha256: 0ae547978fb81ce421257dd2cff84e83cc78e921f0bd5c99ceeef167164188fa + md5: 27fb85b4f988b64aa9ab0e926ddd6b33 depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 @@ -10109,201 +10186,211 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 626553 - timestamp: 1726097532609 + size: 626278 + timestamp: 1728297526707 - kind: conda name: libgdal-pg version: 3.9.2 - build: h151b34b_2 - build_number: 2 + build: h5e77dd0_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h151b34b_2.conda - sha256: 2d22ad3addcc207e9db4b9a9d4669e24e8b6aaf874eb44b59db46a6912dab148 - md5: 4719aec6235ddee1407f2359c15a597e + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h5e77dd0_7.conda + sha256: 24bebc7b479dc2373739655a4e8e4142d47d64b37dd5529fdf87dfc2e7586cc4 + md5: e86b26f53ae868565e95fde5b10753d3 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libpq >=16.4,<17.0a0 + - libpq >=17.0,<18.0a0 - libstdcxx >=13 - postgresql license: MIT + license_family: MIT purls: [] - size: 526700 - timestamp: 1726093379300 + size: 527072 + timestamp: 1728294709895 - kind: conda name: libgdal-pg version: 3.9.2 - build: h7ffd8cf_2 - build_number: 2 + build: h7313820_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7ffd8cf_2.conda - sha256: 
de450f862f4595949b7c0e4f9594a50d400a049a43a8c6a4abec2b208e906f30 - md5: 5cd82b1f469ec92d3000f537dd9c9c70 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-pg-3.9.2-h7313820_7.conda + sha256: 98c337157c792a87de664a1d68e21fd3a46adf411d77b25796e465450d698cdc + md5: 2f5b01cbb8a163a6eb75537eda6b5dad depends: - __osx >=10.13 - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libpq >=16.4,<17.0a0 + - libpq >=17.0,<18.0a0 - postgresql license: MIT + license_family: MIT purls: [] - size: 507951 - timestamp: 1726094861491 + size: 507879 + timestamp: 1728295513705 - kind: conda name: libgdal-pg version: 3.9.2 - build: ha693a0f_2 - build_number: 2 + build: hfaa227e_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-ha693a0f_2.conda - sha256: 9518ddc91776c520b53000aa45195846777671b21c62ce8d129bdb416f94152c - md5: 84c8cbff3d3aa8e9b3a777cf9577636b + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-pg-3.9.2-hfaa227e_7.conda + sha256: 73a4bfb87b58d74527142887dbc941e4abffd9a6a377115d47ea5be79303da8c + md5: c37be85d5b9a3b1fc94a99d6f8327ab1 depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libpq >=16.4,<17.0a0 + - libpq >=17.0,<18.0a0 - postgresql - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 532039 - timestamp: 1726097733407 + size: 532532 + timestamp: 1728297702019 - kind: conda name: libgdal-postgisraster version: 3.9.2 - build: h151b34b_2 - build_number: 2 + build: h5e77dd0_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h151b34b_2.conda - sha256: 56a0dd9797bb33fe568a65fda62104fe9605b65d1ee0ad15f238a7b1939d5b1d - md5: 3a508686bc81fd6552ffbcbfac942dde + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda + sha256: cfa1968d15e1e4ab94c74a426c55795bd2b702bd9e99767cb74633dfba77afbc + md5: 
3392965ffc4e8b7c66a532750ce0e91f depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libpq >=16.4,<17.0a0 + - libpq >=17.0,<18.0a0 - libstdcxx >=13 - postgresql license: MIT + license_family: MIT purls: [] - size: 480156 - timestamp: 1726093423575 + size: 480524 + timestamp: 1728294772712 - kind: conda name: libgdal-postgisraster version: 3.9.2 - build: h7ffd8cf_2 - build_number: 2 + build: h7313820_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7ffd8cf_2.conda - sha256: bbabb16d210723591ef6a20aa743deefd7e704813a459b2959203d967efb084c - md5: 477603447d6359fc22119bd95b49e98e + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-postgisraster-3.9.2-h7313820_7.conda + sha256: de12866239eff9107f88ac53f9febad34ce9af8b3a85548c177d3d1890e0bdd2 + md5: 809a29ffbae53df08e05c190b89e7373 depends: - __osx >=10.13 - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libpq >=16.4,<17.0a0 + - libpq >=17.0,<18.0a0 - postgresql license: MIT + license_family: MIT purls: [] - size: 470037 - timestamp: 1726094979700 + size: 470127 + timestamp: 1728295635265 - kind: conda name: libgdal-postgisraster version: 3.9.2 - build: ha693a0f_2 - build_number: 2 + build: hfaa227e_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-ha693a0f_2.conda - sha256: ab42f085618718d70d7dc3bb2bf0eb472516b6a04b92bad8d90e9bdd791e0533 - md5: 96eea459fa2e4f822f29d2352c10c32c + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-hfaa227e_7.conda + sha256: eb60e16b2727fa20288c8c5d01b168df38476904dd0c3ea3d92ae96b3579b7d9 + md5: b24ac84c98145ebf555950076c0417da depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - libpq >=16.4,<17.0a0 + - libpq >=17.0,<18.0a0 - postgresql - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - 
size: 504874 - timestamp: 1726097929736 + size: 505346 + timestamp: 1728297893312 - kind: conda name: libgdal-tiledb version: 3.9.2 - build: h4a3bace_2 - build_number: 2 + build: h4a3bace_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda - sha256: f3f2ecc68a847ab644c6ba7d9a38dcdd4996393f851ddaef401fe4d90f0ba8e9 - md5: c3fac34ecba2fcf9d5d31a03b975d5a1 + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_7.conda + sha256: 9e536d6c89d3ea1ae2b327b629a31195e5217ad7b2c929025c1fba4cce141330 + md5: 57c9f5047557fe386e5c1b2951a76ea8 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - libstdcxx >=13 - - tiledb >=2.26.0,<2.27.0a0 + - tiledb >=2.26.2,<2.27.0a0 license: MIT + license_family: MIT purls: [] - size: 681765 - timestamp: 1726093490312 + size: 682363 + timestamp: 1728294866734 - kind: conda name: libgdal-tiledb version: 3.9.2 - build: h6b11327_2 - build_number: 2 + build: h6b11327_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_2.conda - sha256: dda569758f13d5ec8d485397abce0a4b6fceedd1e06d10f031f4c5f644dd2709 - md5: 82799fcd51f47381b7398e9521c1ad95 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-tiledb-3.9.2-h6b11327_7.conda + sha256: 566c9b66f11d5cd91e4cb27da8e12366b7aa2ce74604fb33d0ceceeec33675aa + md5: 11852847dee892f18317b34e609ed62c depends: - __osx >=10.13 - libcxx >=17 - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - tiledb >=2.26.0,<2.27.0a0 + - tiledb >=2.26.2,<2.27.0a0 license: MIT + license_family: MIT purls: [] - size: 630738 - timestamp: 1726095119594 + size: 630567 + timestamp: 1728295778156 - kind: conda name: libgdal-tiledb version: 3.9.2 - build: hb8b5d01_2 - build_number: 2 + build: hb8b5d01_7 + build_number: 7 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_2.conda - sha256: 3d9b00940e17134d0c16c6406b68d7b9afa33e49a8c695f11eb0da7d467f4718 - md5: acc3612fa26fab59d18d4075d12c15b0 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_7.conda + sha256: 72411ba4a617603248e10350598e8c5a8d28636a792e779bd399e7faa2ea11e0 + md5: 6d0cd845caa291c4b8121a32a580f332 depends: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 - - tiledb >=2.26.0,<2.27.0a0 + - tiledb >=2.26.2,<2.27.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 628075 - timestamp: 1726098183036 + size: 628428 + timestamp: 1728298123989 - kind: conda name: libgdal-xls version: 3.9.2 - build: h03c987c_2 - build_number: 2 + build: h03c987c_7 + build_number: 7 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_2.conda - sha256: 2215783e6f3aa153a0c2ff3d9dd2c95481a8153fc2563192a14fff8bf4f85d19 - md5: a19adc2adb5cc91a831838fdd69e895d + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda + sha256: 363f00ff7b5295a65e918c5f96bcd8fd3daba09fd8d6563de7b7d266144f86e5 + md5: 165f12373452e8d17889e9c877431acf depends: - __glibc >=2.17,<3.0.a0 - freexl >=2.0.0,<3.0a0 @@ -10312,18 +10399,19 @@ packages: - libkml >=1.3.0,<1.4.0a0 - libstdcxx >=13 license: MIT + license_family: MIT purls: [] - size: 434313 - timestamp: 1726093528376 + size: 434813 + timestamp: 1728294922029 - kind: conda name: libgdal-xls version: 3.9.2 - build: hc33d192_2 - build_number: 2 + build: hc33d192_7 + build_number: 7 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_2.conda - sha256: af48ecd38ed1b1e0a8a55c7bae5a646f164275f8aba93cc3aaa7939c99b30dcb - md5: 19731e92fa7d594f556519d4b4c40b36 + url: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_7.conda + sha256: 
dc01de5fe688b299380e1841ee5922862e089e12c2a860aa37aaefd374b3d418 + md5: 21c841ccc91f10276e79f7caa2e497bc depends: - __osx >=10.13 - freexl >=2.0.0,<3.0a0 @@ -10331,18 +10419,19 @@ packages: - libgdal-core >=3.9 - libkml >=1.3.0,<1.4.0a0 license: MIT + license_family: MIT purls: [] - size: 431816 - timestamp: 1726095230562 + size: 432005 + timestamp: 1728295934357 - kind: conda name: libgdal-xls version: 3.9.2 - build: hd0e23a6_2 - build_number: 2 + build: hd0e23a6_7 + build_number: 7 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_2.conda - sha256: 9dc2e89ab5d598d20f29ebda3a90655b18c2ae7b088b01bd36ac9e19fb0d7df5 - md5: 7f4f627d1a972d3b98d6441b21c17db4 + url: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_7.conda + sha256: 44c5c2fd75a5b34fbd71316bef26018023376adaaf2145a8277f906899184eb2 + md5: c7366f3ec5ce24208987333a077b42f6 depends: - freexl >=2.0.0,<3.0a0 - libgdal-core >=3.9 @@ -10351,9 +10440,10 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: MIT + license_family: MIT purls: [] - size: 466183 - timestamp: 1726098349141 + size: 466632 + timestamp: 1728298290371 - kind: conda name: libgfortran version: 5.0.0 @@ -10443,117 +10533,135 @@ packages: - kind: conda name: libgl version: 1.7.0 - build: ha4b6fd6_0 + build: ha4b6fd6_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_0.conda - sha256: 993f3bfe04e16c58fceab108bf54f1522ff93a657a22a4ced8c56658001d55fa - md5: 3deca8c25851196c28d1c84dd4ae9149 + url: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda + sha256: 2de573a2231d0ffa13242e274d33b7bae88fb0a178392fd4a03cf803a47e4051 + md5: 204892bce2e44252b5cf272712f10bdd depends: - __glibc >=2.17,<3.0.a0 - - libglvnd 1.7.0 ha4b6fd6_0 - - libglx 1.7.0 ha4b6fd6_0 + - libglvnd 1.7.0 ha4b6fd6_1 + - libglx 1.7.0 ha4b6fd6_1 license: LicenseRef-libglvnd purls: [] - size: 132746 - timestamp: 
1723473216625 + size: 134476 + timestamp: 1727968620103 - kind: conda name: libglib - version: 2.80.3 - build: h315aac3_2 - build_number: 2 + version: 2.82.1 + build: h2ff4ddf_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - sha256: 7470e664b780b91708bed356cc634874dfc3d6f17cbf884a1d6f5d6d59c09f91 - md5: b0143a3e98136a680b728fdf9b42a258 + url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.1-h2ff4ddf_0.conda + sha256: fe9bebb2347d0fc8c5c9e1dd0750e0d640061dc66712a4218bad46d0adc11131 + md5: 47a2209fa0df11797df0b767d1de1275 depends: - __glibc >=2.17,<3.0.a0 - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libiconv >=1.17,<2.0a0 - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.44,<10.45.0a0 constrains: - - glib 2.80.3 *_2 + - glib 2.82.1 *_0 license: LGPL-2.1-or-later purls: [] - size: 3922900 - timestamp: 1723208802469 + size: 3928640 + timestamp: 1727380513702 - kind: conda name: libglib - version: 2.80.3 - build: h7025463_2 - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libglib-2.80.3-h7025463_2.conda - sha256: 1461eb3b10814630acd1f3a11fc47dbb81c46a4f1f32ed389e3ae050a09c4903 - md5: b60894793e7e4a555027bfb4e4ed1d54 + version: 2.82.1 + build: h63bbcf2_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.1-h63bbcf2_0.conda + sha256: 9f19b7d33a7f49545fdbd514d2f577e6dc3638b17210c93877b52c021ac5ad22 + md5: 0a17d0518293f31c5495674ad3ab4e89 depends: + - __osx >=10.13 - libffi >=3.4,<4.0a0 - libiconv >=1.17,<2.0a0 - libintl >=0.22.5,<1.0a0 - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.44,<10.45.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - - glib 2.80.3 *_2 + - glib 2.82.1 *_0 license: LGPL-2.1-or-later purls: [] - size: 3726738 - timestamp: 1723209368854 + size: 3729756 + timestamp: 1727380687514 - kind: conda name: libglib - version: 2.80.3 - build: h736d271_2 - build_number: 2 - subdir: osx-64 
- url: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.80.3-h736d271_2.conda - sha256: 5543fbb3b1487ffd3a4acbb0b5322ab74ef48c68748fa2907fb47fb825a90bf8 - md5: 975e416ffec75b06cbf8532f5fc1a55e + version: 2.82.1 + build: h7025463_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.1-h7025463_0.conda + sha256: b037bc6a1219f3ab9e3e408edcb0659ac9bae542c50213977e6ade90f70fc62e + md5: f784035a6fcb34f0583ca3bd0dcc6c3b depends: - - __osx >=10.13 - libffi >=3.4,<4.0a0 - libiconv >=1.17,<2.0a0 - libintl >=0.22.5,<1.0a0 - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.44,<10.45.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - glib 2.80.3 *_2 + - glib 2.82.1 *_0 license: LGPL-2.1-or-later purls: [] - size: 3674504 - timestamp: 1723209150363 + size: 3759023 + timestamp: 1727442499076 - kind: conda name: libglvnd version: 1.7.0 - build: ha4b6fd6_0 + build: ha4b6fd6_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_0.conda - sha256: ce35ceca19110ba9d27cb0058e55c62ea0489b3dfad76d016df2d0bf4f027998 - md5: e46b5ae31282252e0525713e34ffbe2b + url: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda + sha256: 67942c2b6e4ddb705640b5db962e678f17d8305df5c1633e939cef1158a95058 + md5: 1ece2ccb1dc8c68639712b05e0fae070 depends: - __glibc >=2.17,<3.0.a0 license: LicenseRef-libglvnd purls: [] - size: 129500 - timestamp: 1723473188457 + size: 132216 + timestamp: 1727968577428 - kind: conda name: libglx version: 1.7.0 - build: ha4b6fd6_0 + build: ha4b6fd6_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_0.conda - sha256: 72ba2a55de3d8902b40359433bbc51f50574067eaf2ae4081a2347d3735e30bb - md5: b470cc353c5b852e0d830e8d5d23e952 + url: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda + sha256: 
facc239145719034f7b8815d9630032e701d26534dae28303cdbae8b19590a82 + md5: 80a57756c545ad11f9847835aa21e6b2 depends: - __glibc >=2.17,<3.0.a0 - - libglvnd 1.7.0 ha4b6fd6_0 - - xorg-libx11 >=1.8.9,<2.0a0 + - libglvnd 1.7.0 ha4b6fd6_1 + - xorg-libx11 >=1.8.10,<2.0a0 license: LicenseRef-libglvnd purls: [] - size: 79343 - timestamp: 1723473207891 + size: 77902 + timestamp: 1727968607539 +- kind: conda + name: libgomp + version: 14.1.0 + build: h1383e82_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.1.0-h1383e82_1.conda + sha256: c7c2c51397d57c2e4d48f8676d340ddf88067886f849128ba7d6bd24619dbccc + md5: f8aa80643cd3ff1767ea4e6008ed52d1 + depends: + - libwinpthread >=12.0.0.r4.gg4f2fc60ca + constrains: + - msys2-conda-epoch <0.0a0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 522202 + timestamp: 1724805108466 - kind: conda name: libgomp version: 14.1.0 @@ -10572,135 +10680,141 @@ packages: timestamp: 1724801743478 - kind: conda name: libgoogle-cloud - version: 2.28.0 - build: h26d7fe4_0 + version: 2.29.0 + build: h438788a_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda - sha256: d87b83d91a9fed749b80dea915452320598035949804db3be616b8c3d694c743 - md5: 2c51703b4d775f8943c08a361788131b + url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.29.0-h438788a_1.conda + sha256: cf5c97fb1a270a072faae6decd7e74681e7ead99a1cec6325c8d7a7213bcb2d1 + md5: 3d27459264de681a74c0aebbbd3ecd8f depends: - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libcurl >=8.9.1,<9.0a0 - - libgcc-ng >=12 - - libgrpc >=1.62.2,<1.63.0a0 - - libprotobuf >=4.25.3,<4.25.4.0a0 - - libstdcxx-ng >=12 - - openssl >=3.3.1,<4.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libcurl >=8.10.1,<9.0a0 + - libgcc >=13 + - libgrpc >=1.65.5,<1.66.0a0 + - libprotobuf >=5.27.5,<5.27.6.0a0 + - 
libstdcxx >=13 + - openssl >=3.3.2,<4.0a0 constrains: - - libgoogle-cloud 2.28.0 *_0 + - libgoogle-cloud 2.29.0 *_1 license: Apache-2.0 license_family: Apache purls: [] - size: 1226849 - timestamp: 1723370075980 + size: 1200532 + timestamp: 1727245497586 - kind: conda name: libgoogle-cloud - version: 2.28.0 - build: h5e7cea3_0 + version: 2.29.0 + build: ha00044d_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.28.0-h5e7cea3_0.conda - sha256: 30c5eb3509d0a4b5418e58da7cda7cfee7d06b8759efaec1f544f7fcb54bcac0 - md5: 78a31d951ca2e524c6c223d865edd7ae + url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.29.0-ha00044d_1.conda + sha256: 829e30b66305374cef5dfc9c8d90915978b0d4c1caf465c5cc35bdba13c94bcb + md5: e7542181fcc204326558a2d3e9e0b5c2 depends: - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libcurl >=8.9.1,<9.0a0 - - libgrpc >=1.62.2,<1.63.0a0 - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libcurl >=8.10.1,<9.0a0 + - libgrpc >=1.65.5,<1.66.0a0 + - libprotobuf >=5.27.5,<5.27.6.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 constrains: - - libgoogle-cloud 2.28.0 *_0 + - libgoogle-cloud 2.29.0 *_1 license: Apache-2.0 license_family: Apache purls: [] - size: 14358 - timestamp: 1723371187491 + size: 14469 + timestamp: 1727246130012 - kind: conda name: libgoogle-cloud - version: 2.28.0 - build: h721cda5_0 + version: 2.29.0 + build: hade041e_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.28.0-h721cda5_0.conda - sha256: bf45c8d96cb69476814a674f59640178a6b7868d644351bd84e85e37a045795b - md5: c06aee3922ccde627583a5480a0c8445 + url: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.29.0-hade041e_1.conda + sha256: 87e526f00c24f4e7c3a6cfccb966509599708f29cfcbd6a7238a0306d59e036c + md5: 0966331f6b1362dc16aebfa76b0c33ab depends: - __osx >=10.13 - libabseil * 
cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libcurl >=8.9.1,<9.0a0 - - libcxx >=16 - - libgrpc >=1.62.2,<1.63.0a0 - - libprotobuf >=4.25.3,<4.25.4.0a0 - - openssl >=3.3.1,<4.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libcurl >=8.10.1,<9.0a0 + - libcxx >=17 + - libgrpc >=1.65.5,<1.66.0a0 + - libprotobuf >=5.27.5,<5.27.6.0a0 + - openssl >=3.3.2,<4.0a0 constrains: - - libgoogle-cloud 2.28.0 *_0 + - libgoogle-cloud 2.29.0 *_1 license: Apache-2.0 license_family: Apache purls: [] - size: 863685 - timestamp: 1723369321726 + size: 864830 + timestamp: 1727245147999 - kind: conda name: libgoogle-cloud-storage - version: 2.28.0 - build: h9e84e37_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.28.0-h9e84e37_0.conda - sha256: c55dfdd25ecc40383ba9829ae23cca95a0c48280794edc1280fdca2bc0342ef4 - md5: 6f55d1a6c280ffaddb741dc770cb817c + version: 2.29.0 + build: h0121fbd_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.29.0-h0121fbd_1.conda + sha256: 78e22048ab9bb554c4269f5e2a4ab9baae2c0f490418e0cdecd04e5c59130805 + md5: ea93fded95ddff7798e28954c446e22f depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - libabseil - libcrc32c >=1.1.2,<1.2.0a0 - libcurl - - libcxx >=16 - - libgoogle-cloud 2.28.0 h721cda5_0 + - libgcc >=13 + - libgoogle-cloud 2.29.0 h438788a_1 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - openssl license: Apache-2.0 license_family: Apache purls: [] - size: 542383 - timestamp: 1723370234408 + size: 781418 + timestamp: 1727245657213 - kind: conda name: libgoogle-cloud-storage - version: 2.28.0 - build: ha262f82_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda - sha256: 3237bc1ee88dab8d8fea0a1886e12a0262ff5e471944a234c314aa1da411588e - md5: 9e7960f0b9ab3895ef73d92477c47dae + version: 2.29.0 + build: h8126ed0_1 + build_number: 1 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.29.0-h8126ed0_1.conda + sha256: 205a2d89c50fa60aa5f61d7b06c6f13a672ad8bfabb3705a234605eb156cf37b + md5: e5c845b99aa7b4d90a6717f2739d622b depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - libabseil - libcrc32c >=1.1.2,<1.2.0a0 - libcurl - - libgcc-ng >=12 - - libgoogle-cloud 2.28.0 h26d7fe4_0 - - libstdcxx-ng >=12 + - libcxx >=17 + - libgoogle-cloud 2.29.0 hade041e_1 - libzlib >=1.3.1,<2.0a0 - openssl license: Apache-2.0 license_family: Apache purls: [] - size: 769298 - timestamp: 1723370220027 + size: 551487 + timestamp: 1727246090236 - kind: conda name: libgoogle-cloud-storage - version: 2.28.0 - build: he5eb982_0 + version: 2.29.0 + build: he5eb982_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.28.0-he5eb982_0.conda - sha256: 6318a81a6ef2a72b70c2ddfdadaa5ac79fce431ffa1125e7ca0f9286fa9d9342 - md5: c60153238c7fcdda236b51248220c4bb + url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.29.0-he5eb982_1.conda + sha256: 9134d894877858f10efa2c7102a7f69f9e3a96caa1f2c4097c45cde41dcc9fe8 + md5: 6d9b4c7bcb190d7ca32531b6504d50b7 depends: - libabseil - libcrc32c >=1.1.2,<1.2.0a0 - libcurl - - libgoogle-cloud 2.28.0 h5e7cea3_0 + - libgoogle-cloud 2.29.0 ha00044d_1 - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -10708,107 +10822,88 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 14259 - timestamp: 1723371607596 + size: 14354 + timestamp: 1727246493318 - kind: conda name: libgrpc - version: 1.62.2 - build: h15f2491_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda - sha256: 28241ed89335871db33cb6010e9ccb2d9e9b6bb444ddf6884f02f0857363c06a - md5: 8dabe607748cb3d7002ad73cd06f1325 + version: 1.65.5 + build: ha20e22e_0 + subdir: win-64 + url: 
https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.65.5-ha20e22e_0.conda + sha256: f3aee23aac459be6206081ac9c996d3a7480deb1faab6088c268d29a890b9875 + md5: b550afe2fea16769fa9ef3fcbeadf0c1 depends: - - c-ares >=1.28.1,<2.0a0 + - c-ares >=1.33.1,<2.0a0 - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libgcc-ng >=12 - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libprotobuf >=5.27.5,<5.27.6.0a0 - libre2-11 >=2023.9.1,<2024.0a0 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.2.1,<4.0a0 - - re2 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.2,<4.0a0 + - re2 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - grpc-cpp =1.62.2 + - grpc-cpp =1.65.5 license: Apache-2.0 license_family: APACHE purls: [] - size: 7316832 - timestamp: 1713390645548 + size: 16648528 + timestamp: 1727201450991 - kind: conda name: libgrpc - version: 1.62.2 - build: h384b2fc_0 + version: 1.65.5 + build: hb88832f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.62.2-h384b2fc_0.conda - sha256: 7c228040e7dac4e5e7e6935a4decf6bc2155cc05fcfb0811d25ccb242d0036ba - md5: 9421f67cf8b4bc976fe5d0c3ab42de18 + url: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.65.5-hb88832f_0.conda + sha256: b862af63c5b362743527fde9b7411b6a2d1270f858acbabc2beeee306465b405 + md5: 8ef969b891fe57caf3acfb5495ec0ab9 depends: - __osx >=10.13 - - c-ares >=1.28.1,<2.0a0 + - c-ares >=1.33.1,<2.0a0 - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libcxx >=16 - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libcxx >=17 + - libprotobuf >=5.27.5,<5.27.6.0a0 - libre2-11 >=2023.9.1,<2024.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.2.1,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.2,<4.0a0 - re2 constrains: - - grpc-cpp =1.62.2 + - grpc-cpp =1.65.5 license: Apache-2.0 license_family: APACHE purls: [] - size: 5189573 - timestamp: 
1713392887258 + size: 5152392 + timestamp: 1727200873427 - kind: conda name: libgrpc - version: 1.62.2 - build: h5273850_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.62.2-h5273850_0.conda - sha256: 08794bf5ea0e19ac23ed47d0f8699b5c05c46f14334b41f075e53bac9bbf97d8 - md5: 2939e4b5baecfeac1e8dee5c4f579f1a + version: 1.65.5 + build: hf5c653b_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.65.5-hf5c653b_0.conda + sha256: d279abd46262e817c7a00aeb4df9b5ed4de38130130b248e2c50875e982f30fa + md5: 3b0048cabc6815a4d8874a0240519d32 depends: - - c-ares >=1.28.1,<2.0a0 + - __glibc >=2.17,<3.0.a0 + - c-ares >=1.32.3,<2.0a0 - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libgcc >=13 + - libprotobuf >=5.27.5,<5.27.6.0a0 - libre2-11 >=2023.9.1,<2024.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.2.1,<4.0a0 + - libstdcxx >=13 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.2,<4.0a0 - re2 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - - grpc-cpp =1.62.2 + - grpc-cpp =1.65.5 license: Apache-2.0 license_family: APACHE purls: [] - size: 16097674 - timestamp: 1713392821679 -- kind: conda - name: libhwloc - version: 2.11.1 - build: default_h8125262_1000 - build_number: 1000 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.11.1-default_h8125262_1000.conda - sha256: 92728e292640186759d6dddae3334a1bc0b139740b736ffaeccb825fb8c07a2e - md5: 933bad6e4658157f1aec9b171374fde2 - depends: - - libxml2 >=2.12.7,<3.0a0 - - pthreads-win32 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: BSD-3-Clause - license_family: BSD - purls: [] - size: 2379689 - timestamp: 1720461835526 + size: 7229891 + timestamp: 1727200905306 - kind: conda name: libiconv version: '1.17' @@ -11021,43 +11116,43 @@ packages: - kind: conda name: liblapack 
version: 3.9.0 - build: 23_linux64_openblas - build_number: 23 + build: 24_linux64_openblas + build_number: 24 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-23_linux64_openblas.conda - sha256: 25c7aef86c8a1d9db0e8ee61aa7462ba3b46b482027a65d66eb83e3e6f949043 - md5: 2af0879961951987e464722fd00ec1e0 + url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda + sha256: a15da20c3c0fb5f356e5b4e2f1e87b0da11b9a46805a7f2609bf30f23453831a + md5: fd540578678aefe025705f4b58b36b2e depends: - - libblas 3.9.0 23_linux64_openblas + - libblas 3.9.0 24_linux64_openblas constrains: - - liblapacke 3.9.0 23_linux64_openblas - - libcblas 3.9.0 23_linux64_openblas - blas * openblas + - libcblas 3.9.0 24_linux64_openblas + - liblapacke 3.9.0 24_linux64_openblas license: BSD-3-Clause license_family: BSD purls: [] - size: 14823 - timestamp: 1721688775172 + size: 14911 + timestamp: 1726668467187 - kind: conda name: liblapack version: 3.9.0 - build: 23_win64_mkl - build_number: 23 + build: 24_win64_mkl + build_number: 24 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-23_win64_mkl.conda - sha256: 4f4738602d26935f4d4b0154fb23d48c276c87413c3a5e05274809abfcbe1273 - md5: 3580796ab7b7d68143f45d4d94d866b7 + url: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-24_win64_mkl.conda + sha256: 37dfa34e4c37c7bbb20df61e5badbf42d01e75e687c20be72ab13f80be99ceb9 + md5: c69b7b6756a8d58cc8cf17081fffdc5c depends: - - libblas 3.9.0 23_win64_mkl + - libblas 3.9.0 24_win64_mkl constrains: - blas * mkl - - libcblas 3.9.0 23_win64_mkl - - liblapacke 3.9.0 23_win64_mkl + - libcblas 3.9.0 24_win64_mkl + - liblapacke 3.9.0 24_win64_mkl license: BSD-3-Clause license_family: BSD purls: [] - size: 5191980 - timestamp: 1721689666180 + size: 5183452 + timestamp: 1726669499566 - kind: conda name: libllvm14 version: 14.0.6 @@ -11094,26 +11189,25 @@ packages: size: 31484415 timestamp: 1690557554081 - 
kind: conda - name: libllvm18 - version: 18.1.8 - build: h8b73ec9_2 - build_number: 2 + name: libllvm19 + version: 19.1.1 + build: ha7bfdaf_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libllvm18-18.1.8-h8b73ec9_2.conda - sha256: 41993f35731d8f24e4f91f9318d6d68a3cfc4b5cf5d54f193fbb3ffd246bf2b7 - md5: 2e25bb2f53e4a48873a936f8ef53e592 + url: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.1-ha7bfdaf_0.conda + sha256: 11168659796f5cfe02a0db918ee1596e9dcda8a32564b82f429a56af98fff4c9 + md5: 000cd5fc23967c97284b720cc6049c1e depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc >=13 + - libstdcxx >=13 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - zstd >=1.5.6,<1.6.0a0 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 38233031 - timestamp: 1723208627477 + size: 40126224 + timestamp: 1727867289123 - kind: conda name: libnetcdf version: 4.9.2 @@ -11260,6 +11354,34 @@ packages: purls: [] size: 33408 timestamp: 1697359010159 +- kind: conda + name: libntlm + version: '1.4' + build: h0d85af4_1002 + build_number: 1002 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libntlm-1.4-h0d85af4_1002.tar.bz2 + sha256: c536513b3b7a74a1a46ee426ff6d5511df521b2218ebaff0ac7badc474cddb9a + md5: d9c13a9ec123f376ac38db038b7dfbb6 + license: LGPL-2.1-or-later + purls: [] + size: 32149 + timestamp: 1661533559256 +- kind: conda + name: libntlm + version: '1.4' + build: h7f98852_1002 + build_number: 1002 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2 + sha256: 63244b73156033ea3b7c2a1581526e79b4670349d64b15f645dcdb12de441d1a + md5: e728e874159b042d92b90238a3cb0dc2 + depends: + - libgcc-ng >=9.3.0 + license: LGPL-2.1-or-later + purls: [] + size: 33201 + timestamp: 1609781914458 - kind: conda name: libopenblas version: 0.3.27 @@ -11302,67 +11424,80 @@ packages: size: 5563053 timestamp: 1720426334043 - kind: conda 
- name: libparquet - version: 17.0.0 - build: h39682fd_13_cpu - build_number: 13 + name: libopengl + version: 1.7.0 + build: ha4b6fd6_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda - sha256: 3c63b7391275cf6cf2a18d2dba3c30c16dd9d210373d206675e342b084cccdf4 - md5: 49c60a8dc089d8127b9368e9eb6c1a77 + url: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_1.conda + sha256: b367afa1b63462b7bd64101dc8156470e9932a3f703c3423be26dd5a539a2ec2 + md5: e12057a66af8f2a38a839754ca4481e9 depends: - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h8d2e343_13_cpu - - libgcc >=13 - - libstdcxx >=13 - - libthrift >=0.20.0,<0.20.1.0a0 - - openssl >=3.3.1,<4.0a0 - license: Apache-2.0 - license_family: APACHE + - libglvnd 1.7.0 ha4b6fd6_1 + license: LicenseRef-libglvnd purls: [] - size: 1189824 - timestamp: 1725214804075 + size: 50219 + timestamp: 1727968613527 - kind: conda name: libparquet version: 17.0.0 - build: ha915800_13_cpu - build_number: 13 + build: h59f2d37_20_cpu + build_number: 20 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-ha915800_13_cpu.conda - sha256: 8cf6d193600b4dd6cb1a8fbdea168ef6bddbf8ca1ee57d08ce6992df71a62670 - md5: 30b08e672c5dcd827ce7b44f01f4821e + url: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_20_cpu.conda + sha256: 3ba47daa2954169a2da257c310d39788d81b2c42b6a7ba909f9f1661b5567a16 + md5: 430e46b30775e1db5d7d234489bf34b2 depends: - - libarrow 17.0.0 h29daf90_13_cpu - - libthrift >=0.20.0,<0.20.1.0a0 - - openssl >=3.3.1,<4.0a0 + - libarrow 17.0.0 h297d146_20_cpu + - libthrift >=0.21.0,<0.21.1.0a0 + - openssl >=3.3.2,<4.0a0 - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 + license: Apache-2.0 + purls: [] + size: 801534 + timestamp: 1728535891220 +- kind: conda + name: libparquet + version: 17.0.0 + build: h6bd9018_20_cpu + 
build_number: 20 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_20_cpu.conda + sha256: dfcf9a25e3f687574fbb05c8f14288c3f621a56801a035edb1302d21ec969d7d + md5: 38587a1c1d6657c2998af72ca28e10ed + depends: + - __glibc >=2.17,<3.0.a0 + - libarrow 17.0.0 h364f349_20_cpu + - libgcc >=13 + - libstdcxx >=13 + - libthrift >=0.21.0,<0.21.1.0a0 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 - license_family: APACHE purls: [] - size: 805417 - timestamp: 1725215420059 + size: 1188248 + timestamp: 1728534858689 - kind: conda name: libparquet version: 17.0.0 - build: hf1b0f52_13_cpu - build_number: 13 + build: hc957f30_20_cpu + build_number: 20 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hf1b0f52_13_cpu.conda - sha256: f4b5b4e32cc6ffed205594b8db0764b34b896d4080473f271ff893ca44b872e9 - md5: 303a154bbc5ce01673f6b83cf20da30a + url: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_20_cpu.conda + sha256: fc496df895106ce56f2f951844bb8177734c197fd904d62549b7c75c1d666dcf + md5: 2d9be33ce40ab5506f47931dcb4e4929 depends: - __osx >=10.13 - - libarrow 17.0.0 ha60c65e_13_cpu - - libcxx >=17 - - libthrift >=0.20.0,<0.20.1.0a0 - - openssl >=3.3.1,<4.0a0 + - libarrow 17.0.0 h74c0fbd_20_cpu + - libcxx >=18 + - libthrift >=0.21.0,<0.21.1.0a0 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 - license_family: APACHE purls: [] - size: 925660 - timestamp: 1725215237883 + size: 926343 + timestamp: 1728534954359 - kind: conda name: libpciaccess version: '0.18' @@ -11380,282 +11515,291 @@ packages: timestamp: 1707101388552 - kind: conda name: libpng - version: 1.6.43 - build: h19919ed_0 + version: 1.6.44 + build: h3ca93ac_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.43-h19919ed_0.conda - sha256: 6ad31bf262a114de5bbe0c6ba73b29ed25239d0f46f9d59700310d2ea0b3c142 - md5: 77e398acc32617a0384553aea29e866b + url: 
https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.44-h3ca93ac_0.conda + sha256: 0d3d6ff9225f6918ac225e3839c0d91e5af1da08a4ebf59cac1bfd86018db945 + md5: 639ac6b55a40aa5de7b8c1b4d78f9e81 depends: - - libzlib >=1.2.13,<2.0.0a0 + - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: zlib-acknowledgement purls: [] - size: 347514 - timestamp: 1708780763195 + size: 348933 + timestamp: 1726235196095 - kind: conda name: libpng - version: 1.6.43 - build: h2797004_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda - sha256: 502f6ff148ac2777cc55ae4ade01a8fc3543b4ffab25c4e0eaa15f94e90dd997 - md5: 009981dd9cfcaa4dbfa25ffaed86bcae + version: 1.6.44 + build: h4b8f8c9_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.44-h4b8f8c9_0.conda + sha256: 12b44e58f8832798d7a5c0a7480c95e905dbd6c3558dec09739062411f9e08d1 + md5: f32ac2c8dd390dbf169f550887ed09d9 depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 + - __osx >=10.13 + - libzlib >=1.3.1,<2.0a0 license: zlib-acknowledgement purls: [] - size: 288221 - timestamp: 1708780443939 + size: 268073 + timestamp: 1726234803010 - kind: conda name: libpng - version: 1.6.43 - build: h92b6c6a_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.43-h92b6c6a_0.conda - sha256: 13e646d24b5179e6b0a5ece4451a587d759f55d9a360b7015f8f96eff4524b8f - md5: 65dcddb15965c9de2c0365cb14910532 + version: 1.6.44 + build: hadc24fc_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda + sha256: e5b14f7a01c2db4362d8591f42f82f336ed48d5e4079e4d1f65d0c2a3637ea78 + md5: f4cc49d7aa68316213e4b12be35308d1 depends: - - libzlib >=1.2.13,<2.0.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libzlib >=1.3.1,<2.0a0 license: zlib-acknowledgement purls: [] - size: 268524 - timestamp: 1708780496420 + size: 290661 + timestamp: 1726234747153 - kind: 
conda name: libpq - version: '16.4' - build: h2d7952a_1 - build_number: 1 + version: '17.0' + build: h04577a9_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda - sha256: f7a425b8bc94a541f9c43120734305705ffaa3054470e49fbdea0f166fc3f371 - md5: 7e3173fd1299939a02ebf9ec32aa77c4 + url: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_2.conda + sha256: 48ac53293aba8f8590c16b530def8434033f1f08fe4eaaa897756563b50da7cd + md5: c00807c15530f0cb373a89fd5ead6599 depends: - __glibc >=2.17,<3.0.a0 + - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - libgcc >=13 - - openssl >=3.3.1,<4.0a0 + - openldap >=2.6.8,<2.7.0a0 + - openssl >=3.3.2,<4.0a0 license: PostgreSQL purls: [] - size: 2510669 - timestamp: 1724948449731 + size: 2621446 + timestamp: 1727852819478 - kind: conda name: libpq - version: '16.4' - build: h75a757a_1 - build_number: 1 + version: '17.0' + build: h6e894e8_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libpq-16.4-h75a757a_1.conda - sha256: 161d92de944fefc60414b44f1672d2917dac1e5996f9363635301589b5ee0a94 - md5: 3316ac3fbb20afd3e2a18d6c4264885f + url: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-h6e894e8_2.conda + sha256: 103ac6f8b717ba11faf9e158321a81998d69244428fc75fa220ba269dc6222a0 + md5: c95c34362d0f7b12f9d691d378bcc902 depends: - __osx >=10.13 + - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - openssl >=3.3.1,<4.0a0 + - openldap >=2.6.8,<2.7.0a0 + - openssl >=3.3.2,<4.0a0 license: PostgreSQL purls: [] - size: 2340921 - timestamp: 1724948593326 + size: 2523581 + timestamp: 1727853063006 - kind: conda name: libpq - version: '16.4' - build: hab9416b_1 - build_number: 1 + version: '17.0' + build: h7ec079e_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libpq-16.4-hab9416b_1.conda - sha256: cc3adc6165e65fa3eabf48219e22bf33f7afe98369f88c5ba0629b4958b61067 - md5: 
6b8e08902d6f4d581e42f9862ba2bc2a + url: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_2.conda + sha256: e28e770d22a72966d06f84e060eae1cb027a750e583b8e95f6a02568b074e4cd + md5: c3048e5c8f6b907005e5d2bf5ecbd1a6 depends: + - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: PostgreSQL purls: [] - size: 3498610 - timestamp: 1724949283448 + size: 3834481 + timestamp: 1727853577777 - kind: conda name: libprotobuf - version: 4.25.3 - build: h08a7969_0 + version: 5.27.5 + build: h5b01275_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda - sha256: 70e0eef046033af2e8d21251a785563ad738ed5281c74e21c31c457780845dcd - md5: 6945825cebd2aeb16af4c69d97c32c13 + url: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.27.5-h5b01275_2.conda + sha256: 79ac9726cd0a1cb1ba335f7fc7ccac5f679a66d71d9553ca88a805b8787d55ce + md5: 66ed3107adbdfc25ba70454ba11e6d1e depends: + - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libgcc >=13 + - libstdcxx >=13 + - libzlib >=1.3.1,<2.0a0 license: BSD-3-Clause license_family: BSD purls: [] - size: 2811207 - timestamp: 1709514552541 + size: 2940269 + timestamp: 1727424395109 - kind: conda name: libprotobuf - version: 4.25.3 - build: h4e4d658_0 + version: 5.27.5 + build: h62b0dff_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-4.25.3-h4e4d658_0.conda - sha256: 3f126769fb5820387d436370ad48600e05d038a28689fdf9988b64e1059947a8 - md5: 57b7ee4f1fd8573781cfdabaec4a7782 + url: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-5.27.5-h62b0dff_2.conda + sha256: ac77bce3b9a58e6fa72bed339af0d47faf1dec3bc717e4e05e2e729dc42bd2b3 + 
md5: e3b68d9a164d807f70df49e17bc54931 depends: - __osx >=10.13 - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libcxx >=16 - - libzlib >=1.2.13,<2.0.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libcxx >=17 + - libzlib >=1.3.1,<2.0a0 license: BSD-3-Clause license_family: BSD purls: [] - size: 2216001 - timestamp: 1709514908146 + size: 2332719 + timestamp: 1727424047974 - kind: conda name: libprotobuf - version: 4.25.3 - build: h503648d_0 + version: 5.27.5 + build: hcaed137_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-4.25.3-h503648d_0.conda - sha256: 5d4c5592be3994657ebf47e52f26b734cc50b0ea9db007d920e2e31762aac216 - md5: 4da7de0ba35777742edf67bf7a1075df + url: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-5.27.5-hcaed137_2.conda + sha256: f039a07e6a52542e298ad0cf39d95d261f02c62256c82a60e246f291b2535e1b + md5: 0155746155856bc39091b5242c9b52d7 depends: - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libzlib >=1.2.13,<2.0.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: [] - size: 5650604 - timestamp: 1709514804631 + size: 6090012 + timestamp: 1727424307861 - kind: conda name: libre2-11 - version: 2023.09.01 - build: h5a48ba9_2 - build_number: 2 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda - sha256: 3f3c65fe0e9e328b4c1ebc2b622727cef3e5b81b18228cfa6cf0955bc1ed8eff - md5: 41c69fba59d495e8cf5ffda48a607e35 + version: 2023.11.01 + build: h4eb7d71_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.11.01-h4eb7d71_0.conda + sha256: a0a1ea69a63357b1aac9d8eda6b98425fc2bb5ae9650233a41455e9290b1aa05 + md5: 985ae0f827e74151966f2977c1cbe67d depends: - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libgcc-ng >=12 - - 
libstdcxx-ng >=12 + - libabseil >=20240722.0,<20240723.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - re2 2023.09.01.* + - re2 2023.11.01.* license: BSD-3-Clause license_family: BSD purls: [] - size: 232603 - timestamp: 1708946763521 + size: 262162 + timestamp: 1728466651480 - kind: conda name: libre2-11 - version: 2023.09.01 - build: h81f5012_2 - build_number: 2 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.09.01-h81f5012_2.conda - sha256: 384b72a09bd4bb29c1aa085110b2f940dba431587ffb4e2c1a28f605887a1867 - md5: c5c36ec64e3c86504728c38b79011d08 + version: 2023.11.01 + build: hbbce691_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.11.01-hbbce691_0.conda + sha256: e06eb02be1dd3a03bb9e2897c2067e26dc32a526a7fdf1f3b1ba3557b3f168d2 + md5: 18ca8e125b84679e416ba2803ff4356e depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - libcxx >=16 + - libabseil >=20240722.0,<20240723.0a0 + - libgcc >=13 + - libstdcxx >=13 constrains: - - re2 2023.09.01.* + - re2 2023.11.01.* license: BSD-3-Clause license_family: BSD purls: [] - size: 184017 - timestamp: 1708947106275 + size: 240866 + timestamp: 1728466437052 - kind: conda name: libre2-11 - version: 2023.09.01 - build: hf8d8778_2 - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.09.01-hf8d8778_2.conda - sha256: 04331dad30a076ebb24c683197a5feabf4fd9be0fa0e06f416767096f287f900 - md5: cf54cb5077a60797d53a132d37af25fc + version: 2023.11.01 + build: hd530cb8_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.11.01-hd530cb8_0.conda + sha256: d25b112d4384c269fedb04c420ab944fced743ad6f85d2cedac137a86e455e77 + md5: 40e38687b043af900e79f0a10c54a0ca depends: + - __osx >=10.13 - libabseil * cxx17* - - libabseil >=20240116.1,<20240117.0a0 - - ucrt >=10.0.20348.0 - - vc 
>=14.2,<15 - - vc14_runtime >=14.29.30139 + - libabseil >=20240722.0,<20240723.0a0 + - libcxx >=17 constrains: - - re2 2023.09.01.* + - re2 2023.11.01.* license: BSD-3-Clause license_family: BSD purls: [] - size: 256561 - timestamp: 1708947458481 + size: 185921 + timestamp: 1728466515046 - kind: conda name: librttopo version: 1.1.0 - build: h6c42fcb_16 - build_number: 16 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-h6c42fcb_16.conda - sha256: 417d468a42860bee6d487a39603740c3650fb7eae03b694a9bddada9ef5d1017 - md5: 4476d717f460b45f5033206bbb84f3f5 + build: h97f6797_17 + build_number: 17 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda + sha256: 1fb8a71bdbc236b8e74f0475887786735d5fa6f5d76d9a4135021279c7ff54b8 + md5: e16e9b1333385c502bf915195f421934 depends: - - geos >=3.12.2,<3.12.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __glibc >=2.17,<3.0.a0 + - geos >=3.13.0,<3.13.1.0a0 + - libgcc >=13 + - libstdcxx >=13 license: GPL-2.0-or-later license_family: GPL purls: [] - size: 407420 - timestamp: 1720347953921 + size: 231770 + timestamp: 1727338518657 - kind: conda name: librttopo version: 1.1.0 - build: hc670b87_16 - build_number: 16 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda - sha256: 65bfd9f8915b1fc2523c58bf556dc2b9ed6127b7c6877ed2841c67b717f6f924 - md5: 3d9f3a2e5d7213c34997e4464d2f938c + build: hd4c2148_17 + build_number: 17 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-hd4c2148_17.conda + sha256: 0f4a1c8ed579f96ccb73245b4002d7152a2a8ecd05a01d49901c5d280561f766 + md5: 06ea16b8c60b4ce1970c06191f8639d4 depends: - - __glibc >=2.17,<3.0.a0 - - geos >=3.12.2,<3.12.3.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - geos >=3.13.0,<3.13.1.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: 
GPL-2.0-or-later license_family: GPL purls: [] - size: 231637 - timestamp: 1720347750456 + size: 404515 + timestamp: 1727265928370 - kind: conda name: librttopo version: 1.1.0 - build: he2ba7a0_16 - build_number: 16 + build: hdfb80b9_17 + build_number: 17 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-he2ba7a0_16.conda - sha256: 907f602ad39172a98e3062c0d6616535075f5227435753fe2c843eb10891403c - md5: 80cc407788999eb3cd5a3651981e55fd + url: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-hdfb80b9_17.conda + sha256: 683ec76fcc035f3803aedbffdc4e8ab62fbde360bfaa73f3693eeb429c48b029 + md5: 627b89a9764485ebace5ebe42b6e6ab4 depends: - __osx >=10.13 - - geos >=3.12.2,<3.12.3.0a0 - - libcxx >=16 + - geos >=3.13.0,<3.13.1.0a0 + - libcxx >=17 license: GPL-2.0-or-later license_family: GPL purls: [] - size: 213675 - timestamp: 1720347819147 + size: 213348 + timestamp: 1727265795635 - kind: conda name: libsodium version: 1.0.20 @@ -11703,49 +11847,49 @@ packages: - kind: conda name: libspatialite version: 5.1.0 - build: h15fa968_9 - build_number: 9 + build: h1b4f908_11 + build_number: 11 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda - sha256: 541eadcc9f2e3f5c7f801265563412930c9c65e22c21634df96a8cd6465a385e - md5: 4957a903bd6a68cc2e53e47476f9c6f4 + url: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_11.conda + sha256: 11d8537d472c5fc25176fda7af6b9aa47f37ba98d0467b77cb713be18ed847ea + md5: 43a7f3df7d100e8fc280e6636680a870 depends: - __glibc >=2.17,<3.0.a0 - freexl >=2 - freexl >=2.0.0,<3.0a0 - - geos >=3.12.2,<3.12.3.0a0 - - libgcc-ng >=12 + - geos >=3.13.0,<3.13.1.0a0 + - libgcc >=13 - librttopo >=1.1.0,<1.2.0a0 - - libsqlite >=3.46.0,<4.0a0 - - libstdcxx-ng >=12 + - libsqlite >=3.46.1,<4.0a0 + - libstdcxx >=13 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 - sqlite - zlib license: 
MPL-1.1 license_family: MOZILLA purls: [] - size: 3495758 - timestamp: 1722337893853 + size: 4045908 + timestamp: 1727341751247 - kind: conda name: libspatialite version: 5.1.0 - build: hab0cb6d_9 - build_number: 9 + build: h939089a_11 + build_number: 11 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-hab0cb6d_9.conda - sha256: 2b58e62603334b7d3951b93cdee9dd1fe3cd3c18aaafa65ea0f132f780adeb6e - md5: 934f10287da9c46f761abf0ee5f88dd3 + url: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-h939089a_11.conda + sha256: 76da01457b92be57ac0635cec2681c5423a46252713b144391c14aa0dffe61ba + md5: 3ff7b70e2c517f3a43f0b3f87475915a depends: - freexl >=2 - freexl >=2.0.0,<3.0a0 - - geos >=3.12.2,<3.12.3.0a0 + - geos >=3.13.0,<3.13.1.0a0 - librttopo >=1.1.0,<1.2.0a0 - - libsqlite >=3.46.0,<4.0a0 + - libsqlite >=3.46.1,<4.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 - sqlite - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -11754,36 +11898,36 @@ packages: license: MPL-1.1 license_family: MOZILLA purls: [] - size: 8283487 - timestamp: 1722338203533 + size: 8293459 + timestamp: 1727341947641 - kind: conda name: libspatialite version: 5.1.0 - build: hdc25a2c_9 - build_number: 9 + build: hc43c327_11 + build_number: 11 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hdc25a2c_9.conda - sha256: 97f2915388c7b216202aff37bb5163681e96eef0991a7366ccdd3e228d2b6aa6 - md5: 230006cfdaf8e653d16e91e6a9a57c98 + url: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hc43c327_11.conda + sha256: 1e392f1f5544ffeb9ce724d06602a8f8062529824954d11b63d4ae01f45a9b49 + md5: 59c3e269e76ec0e03802ddea2b4e44a0 depends: - __osx >=10.13 - freexl >=2 - freexl >=2.0.0,<3.0a0 - - geos >=3.12.2,<3.12.3.0a0 - - libcxx >=16 + - geos >=3.13.0,<3.13.1.0a0 + - libcxx >=17 - libiconv >=1.17,<2.0a0 - librttopo >=1.1.0,<1.2.0a0 - - libsqlite >=3.46.0,<4.0a0 + - 
libsqlite >=3.46.1,<4.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 - sqlite - zlib license: MPL-1.1 license_family: MOZILLA purls: [] - size: 3148395 - timestamp: 1722338108366 + size: 3315985 + timestamp: 1727341824716 - kind: conda name: libsqlite version: 3.46.1 @@ -11917,125 +12061,122 @@ packages: timestamp: 1724801897766 - kind: conda name: libthrift - version: 0.20.0 - build: h0e7cc3e_1 - build_number: 1 + version: 0.21.0 + build: h0e7cc3e_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda - sha256: 3e70dfda31a3ce28310c86cc0001f20abb78c917502e12c94285a1337fe5b9f0 - md5: d0ed81c4591775b70384f4cc78e05cd1 + url: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda + sha256: ebb395232973c18745b86c9a399a4725b2c39293c9a91b8e59251be013db42f0 + md5: dcb95c0a98ba9ff737f7ae482aef7833 depends: - __glibc >=2.17,<3.0.a0 - libevent >=2.1.12,<2.1.13.0a0 - - libgcc-ng >=13 - - libstdcxx-ng >=13 + - libgcc >=13 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: APACHE purls: [] - size: 417404 - timestamp: 1724652349098 + size: 425773 + timestamp: 1727205853307 - kind: conda name: libthrift - version: 0.20.0 - build: h75589b3_1 - build_number: 1 + version: 0.21.0 + build: h75589b3_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.20.0-h75589b3_1.conda - sha256: a1f40fcb9970fbfd6d0b825841b4127cf7dd7c54199d0b49bdbcd838b66f3b7a - md5: c20b01aa07ece86a237c580f7ba56923 + url: https://conda.anaconda.org/conda-forge/osx-64/libthrift-0.21.0-h75589b3_0.conda + sha256: 3f82eddd6de435a408538ac81a7a2c0c155877534761ec9cd7a2906c005cece2 + md5: 7a472cd20d9ae866aeb6e292b33381d6 depends: - __osx >=10.13 - libcxx >=17 - libevent >=2.1.12,<2.1.13.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 license: 
Apache-2.0 license_family: APACHE purls: [] - size: 324391 - timestamp: 1724657549149 + size: 332651 + timestamp: 1727206546431 - kind: conda name: libthrift - version: 0.20.0 - build: hbe90ef8_1 - build_number: 1 + version: 0.21.0 + build: hbe90ef8_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.20.0-hbe90ef8_1.conda - sha256: 77f92cbacb886f671fdf0bc2fac13f423ba442d0c3171ce3e573ed05f5c8980e - md5: e9f49c00773250da4f622694b7f83f25 + url: https://conda.anaconda.org/conda-forge/win-64/libthrift-0.21.0-hbe90ef8_0.conda + sha256: 81ca4873ba09055c307f8777fb7d967b5c26291f38095785ae52caed75946488 + md5: 7699570e1f97de7001a7107aabf2d677 depends: - libevent >=2.1.12,<2.1.13.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: [] - size: 612714 - timestamp: 1724653005481 + size: 633857 + timestamp: 1727206429954 - kind: conda name: libtiff - version: 4.6.0 - build: h46a8edc_4 - build_number: 4 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h46a8edc_4.conda - sha256: 8d42dd7c6602187d4351fc3b69ff526f1c262bfcbfd6ce05d06008f4e0b99b58 - md5: a7e3a62981350e232e0e7345b5aea580 + version: 4.7.0 + build: h583c2ba_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.7.0-h583c2ba_1.conda + sha256: 4d58c695dfed6f308d0fd3ff552e0078bb98bc0be2ea0bf55820eb6e86fa5355 + md5: 4b78bcdcc8780cede8b3d090deba874d depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - lerc >=4.0.0,<5.0a0 - - libdeflate >=1.21,<1.22.0a0 - - libgcc-ng >=12 + - libcxx >=17 + - libdeflate >=1.22,<1.23.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - - libstdcxx-ng >=12 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - xz >=5.2.6,<6.0a0 - zstd >=1.5.6,<1.6.0a0 license: HPND purls: [] - size: 282236 - timestamp: 1722871642189 + size: 395980 + timestamp: 
1728232302162 - kind: conda name: libtiff - version: 4.6.0 - build: h603087a_4 - build_number: 4 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.6.0-h603087a_4.conda - sha256: 3b853901835167406f1c576207ec0294da4aade69c170a6e29206d454f42c259 - md5: 362626a2aacb976ec89c91b99bfab30b + version: 4.7.0 + build: he137b08_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda + sha256: 9890121db85f6ef463fe12eb04ef1471176e3ef3b5e2d62e8d6dac713df00df4 + md5: 63872517c98aa305da58a757c443698e depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - lerc >=4.0.0,<5.0a0 - - libcxx >=16 - - libdeflate >=1.21,<1.22.0a0 + - libdeflate >=1.22,<1.23.0a0 + - libgcc >=13 - libjpeg-turbo >=3.0.0,<4.0a0 + - libstdcxx >=13 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - xz >=5.2.6,<6.0a0 - zstd >=1.5.6,<1.6.0a0 license: HPND purls: [] - size: 257905 - timestamp: 1722871821174 + size: 428156 + timestamp: 1728232228989 - kind: conda name: libtiff - version: 4.6.0 - build: hb151862_4 - build_number: 4 + version: 4.7.0 + build: hfc51747_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.6.0-hb151862_4.conda - sha256: 1d5a8972f344da2e81b5a27ac0eda977803351151b8923f16cbc056515f5b8c6 - md5: 7d35d9aa8f051d548116039f5813c8ec + url: https://conda.anaconda.org/conda-forge/win-64/libtiff-4.7.0-hfc51747_1.conda + sha256: 902cb9f7f54d17dcfd54ce050b1ce2bc944b9bbd1748913342c2ea1e1140f8bb + md5: eac317ed1cc6b9c0af0c27297e364665 depends: - lerc >=4.0.0,<5.0a0 - - libdeflate >=1.21,<1.22.0a0 + - libdeflate >=1.22,<1.23.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 @@ -12045,8 +12186,8 @@ packages: - zstd >=1.5.6,<1.6.0a0 license: HPND purls: [] - size: 784657 - timestamp: 1722871883822 + size: 978865 + timestamp: 1728232594877 - kind: conda name: libutf8proc version: 2.8.0 @@ -12158,65 +12299,81 @@ packages: purls: [] 
size: 438953 timestamp: 1713199854503 +- kind: conda + name: libwinpthread + version: 12.0.0.r4.gg4f2fc60ca + build: h57928b3_8 + build_number: 8 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libwinpthread-12.0.0.r4.gg4f2fc60ca-h57928b3_8.conda + sha256: 6d5e158813ab8d553fbb0fedd0abe7bf92970b0be3a9ddf12da0f6cbad78f506 + md5: 03cccbba200ee0523bde1f3dad60b1f3 + depends: + - ucrt + constrains: + - pthreads-win32 <0.0a0 + - msys2-conda-epoch <0.0a0 + license: MIT AND BSD-3-Clause-Clear + purls: [] + size: 35433 + timestamp: 1724681489463 - kind: conda name: libxcb - version: '1.16' - build: h00291cd_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.16-h00291cd_1.conda - sha256: 2cd6b74fa4b3ef9a3fe7f92271eb34346af673509aa86739e9f04bf72015f841 - md5: c989b18131ab79fdc67e42473d53d545 + version: 1.17.0 + build: h0e4246c_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.17.0-h0e4246c_0.conda + sha256: 08dec73df0e161c96765468847298a420933a36bc4f09b50e062df8793290737 + md5: a69bbf778a462da324489976c84cfc8c depends: - - __osx >=10.13 + - libgcc >=13 + - libwinpthread >=12.0.0.r4.gg4f2fc60ca - pthread-stubs + - ucrt >=10.0.20348.0 - xorg-libxau >=1.0.11,<2.0a0 - xorg-libxdmcp license: MIT license_family: MIT purls: [] - size: 323886 - timestamp: 1724419422116 + size: 1208687 + timestamp: 1727279378819 - kind: conda name: libxcb - version: '1.16' - build: h013a479_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libxcb-1.16-h013a479_1.conda - sha256: abae56e12a4c62730b899fdfb82628a9ac171c4ce144fc9f34ae024957a82a0e - md5: f0b599acdc82d5bc7e3b105833e7c5c8 + version: 1.17.0 + build: h8a09558_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda + sha256: 666c0c431b23c6cec6e492840b176dde533d48b7e6fb8883f5071223433776aa + md5: 92ed62436b625154323d40d5f2f11dd7 depends: - - m2w64-gcc-libs - 
- m2w64-gcc-libs-core + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - pthread-stubs - xorg-libxau >=1.0.11,<2.0a0 - xorg-libxdmcp license: MIT license_family: MIT purls: [] - size: 989459 - timestamp: 1724419883091 + size: 395888 + timestamp: 1727278577118 - kind: conda name: libxcb - version: '1.16' - build: hb9d3cd8_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda - sha256: 33aa5fc997468b07ab3020b142eacc5479e4e2c2169f467b20ab220f33dd08de - md5: 3601598f0db0470af28985e3e7ad0158 + version: 1.17.0 + build: hf1f96e2_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.17.0-hf1f96e2_0.conda + sha256: 8896cd5deff6f57d102734f3e672bc17120613647288f9122bec69098e839af7 + md5: bbeca862892e2898bdb45792a61c4afc depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=13 + - __osx >=10.13 - pthread-stubs - xorg-libxau >=1.0.11,<2.0a0 - xorg-libxdmcp license: MIT license_family: MIT purls: [] - size: 395570 - timestamp: 1724419104778 + size: 323770 + timestamp: 1727278927545 - kind: conda name: libxcrypt version: 4.4.36 @@ -12244,7 +12401,7 @@ packages: depends: - libgcc-ng >=12 - libstdcxx-ng >=12 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 - libxml2 >=2.12.7,<3.0a0 - xkeyboard-config - xorg-libxau >=1.0.11,<2.0a0 @@ -12317,11 +12474,26 @@ packages: - kind: conda name: libxslt version: 1.1.39 - build: h3df6e99_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libxslt-1.1.39-h3df6e99_0.conda - sha256: 6e3d99466d2076c35e7ac8dcdfe604da3d593f55b74a5b8e96c2b2ff63c247aa - md5: 279ee338c9b34871d578cb3c7aa68f70 + build: h03b04e6_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libxslt-1.1.39-h03b04e6_0.conda + sha256: decfc5614a10231a17543b7366616fb2d88c14be6dd9dd5ecde63aa9a5acfb9e + md5: a6e0cec6b3517ffc6b5d36a920fc9312 + depends: + - libxml2 >=2.12.1,<3.0.0a0 + license: MIT + license_family: MIT + purls: [] + size: 231368 + timestamp: 
1701628933115 +- kind: conda + name: libxslt + version: 1.1.39 + build: h3df6e99_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libxslt-1.1.39-h3df6e99_0.conda + sha256: 6e3d99466d2076c35e7ac8dcdfe604da3d593f55b74a5b8e96c2b2ff63c247aa + md5: 279ee338c9b34871d578cb3c7aa68f70 depends: - libxml2 >=2.12.1,<3.0.0a0 - ucrt >=10.0.20348.0 @@ -12350,136 +12522,134 @@ packages: timestamp: 1701628814990 - kind: conda name: libzip - version: 1.10.1 - build: h1d365fa_3 - build_number: 3 + version: 1.11.1 + build: h25f2845_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libzip-1.10.1-h1d365fa_3.conda - sha256: 221698b52dd7a3dcfc67ff9460e9c8649fc6c86506a2a2ab6f57b97e7489bb9f - md5: 5c629cd12d89e2856c17b1dc5fcf44a4 + url: https://conda.anaconda.org/conda-forge/win-64/libzip-1.11.1-h25f2845_0.conda + sha256: 3cd9834e69a7b24c485a819aa5e1db227326c2626c530149ca8639f6c6816829 + md5: 31bed00bb0fde2d26ffb0f6a75d10fdb depends: - bzip2 >=1.0.8,<2.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.1.2,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.2,<4.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: [] - size: 146434 - timestamp: 1694417117772 + size: 146590 + timestamp: 1726786953987 - kind: conda name: libzip - version: 1.10.1 - build: h2629f0a_3 - build_number: 3 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda - sha256: 84e93f189072dcfcbe77744f19c7e4171523fbecfaba7352e5a23bbe014574c7 - md5: ac79812548e7e8cf61f7b0abdef01d3b + version: 1.11.1 + build: h3116616_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.11.1-h3116616_0.conda + sha256: 99c1f7f1e88e1a0d2eafe8efb31e86a51ff7a91fd53d728f13fbf60dc81b5039 + md5: 316d1b00a2a65224325065d2b9661105 depends: + - __osx >=10.13 - bzip2 >=1.0.8,<2.0a0 - - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.1.2,<4.0a0 + - libzlib 
>=1.3.1,<2.0a0 + - openssl >=3.3.2,<4.0a0 license: BSD-3-Clause license_family: BSD purls: [] - size: 107198 - timestamp: 1694416433629 + size: 128966 + timestamp: 1726786610588 - kind: conda name: libzip - version: 1.10.1 - build: hc158999_3 - build_number: 3 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.10.1-hc158999_3.conda - sha256: 0689e4a6e67e80027e43eefb8a365273405a01f5ab2ece97319155b8be5d64f6 - md5: 6112b3173f3aa2f12a8f40d07a77cc35 + version: 1.11.1 + build: hf83b1b0_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda + sha256: d2b20d0a307beef9d313f56cfcf3ce74d1a53b728124cecee0b3bea657bbf30b + md5: e8536ec89df2aec5f65fefcf4ccd58ba depends: + - __glibc >=2.17,<3.0.a0 - bzip2 >=1.0.8,<2.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - openssl >=3.1.2,<4.0a0 + - libgcc >=13 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.3.2,<4.0a0 license: BSD-3-Clause license_family: BSD purls: [] - size: 127599 - timestamp: 1694416738467 + size: 109414 + timestamp: 1726786452201 - kind: conda name: libzlib version: 1.3.1 - build: h2466b09_1 - build_number: 1 + build: h2466b09_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda - sha256: b13846a54a15243e15f96fec06b526d8155adc6a1ac2b6ed47a88f6a71a94b68 - md5: d4483ca8afc57ddf1f6dded53b36c17f + url: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda + sha256: ba945c6493449bed0e6e29883c4943817f7c79cbff52b83360f7b341277c6402 + md5: 41fbfac52c601159df6c01f875de31b9 depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 constrains: - - zlib 1.3.1 *_1 + - zlib 1.3.1 *_2 license: Zlib license_family: Other purls: [] - size: 56186 - timestamp: 1716874730539 + size: 55476 + timestamp: 1727963768015 - kind: conda name: libzlib version: 1.3.1 - build: h4ab18f5_1 - build_number: 1 + build: hb9d3cd8_2 + build_number: 2 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - sha256: adf6096f98b537a11ae3729eaa642b0811478f0ea0402ca67b5108fe2cb0010d - md5: 57d7dc60e9325e3de37ff8dffd18e814 + url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + sha256: d4bfe88d7cb447768e31650f06257995601f89076080e76df55e3112d4e47dc4 + md5: edb0dca6bc32e4f4789199455a1dbeb8 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 constrains: - - zlib 1.3.1 *_1 + - zlib 1.3.1 *_2 license: Zlib license_family: Other purls: [] - size: 61574 - timestamp: 1716874187109 + size: 60963 + timestamp: 1727963148474 - kind: conda name: libzlib version: 1.3.1 - build: h87427d6_1 - build_number: 1 + build: hd23fc13_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda - sha256: 80a62db652b1da0ccc100812a1d86e94f75028968991bfb17f9536f3aa72d91d - md5: b7575b5aa92108dcc9aaab0f05f2dbce + url: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + sha256: 8412f96504fc5993a63edf1e211d042a1fd5b1d51dedec755d2058948fcced09 + md5: 003a54a4e32b02f7355b50a837e699da depends: - __osx >=10.13 constrains: - - zlib 1.3.1 *_1 + - zlib 1.3.1 *_2 license: Zlib license_family: Other purls: [] - size: 57372 - timestamp: 1716874211519 + size: 57133 + timestamp: 1727963183990 - kind: conda name: llvm-openmp - version: 18.1.8 - build: h15ab845_1 - build_number: 1 + version: 19.1.1 + build: h545e0da_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-18.1.8-h15ab845_1.conda - sha256: 06a245abb6e6d8d6662a35ad162eacb39f431349edf7cea9b1ff73b2da213c58 - md5: ad0afa524866cc1c08b436865d0ae484 + url: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.1-h545e0da_0.conda + sha256: 7e15f5ac89e750dadbc6fe81dc2909dd056c7324c72379a8440b57a6174a1146 + md5: 3f3e4a599dd2638a945fc5821090db07 depends: - __osx >=10.13 constrains: - - openmp 18.1.8|18.1.8.* + - 
openmp 19.1.1|19.1.1.* license: Apache-2.0 WITH LLVM-exception - license_family: APACHE purls: [] - size: 300358 - timestamp: 1723605369115 + size: 305199 + timestamp: 1728517141555 - kind: conda name: llvmlite version: 0.43.0 @@ -12724,97 +12894,15 @@ packages: purls: [] size: 171416 timestamp: 1713515738503 -- kind: conda - name: m2w64-gcc-libgfortran - version: 5.3.0 - build: '6' - build_number: 6 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2 - sha256: 9de95a7996d5366ae0808eef2acbc63f9b11b874aa42375f55379e6715845dc6 - md5: 066552ac6b907ec6d72c0ddab29050dc - depends: - - m2w64-gcc-libs-core - - msys2-conda-epoch ==20160418 - license: GPL, LGPL, FDL, custom - purls: [] - size: 350687 - timestamp: 1608163451316 -- kind: conda - name: m2w64-gcc-libs - version: 5.3.0 - build: '7' - build_number: 7 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2 - sha256: 3bd1ab02b7c89a5b153a17be03b36d833f1517ff2a6a77ead7c4a808b88196aa - md5: fe759119b8b3bfa720b8762c6fdc35de - depends: - - m2w64-gcc-libgfortran - - m2w64-gcc-libs-core - - m2w64-gmp - - m2w64-libwinpthread-git - - msys2-conda-epoch ==20160418 - license: GPL3+, partial:GCCRLE, partial:LGPL2+ - purls: [] - size: 532390 - timestamp: 1608163512830 -- kind: conda - name: m2w64-gcc-libs-core - version: 5.3.0 - build: '7' - build_number: 7 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2 - sha256: 58afdfe859ed2e9a9b1cc06bc408720cb2c3a6a132e59d4805b090d7574f4ee0 - md5: 4289d80fb4d272f1f3b56cfe87ac90bd - depends: - - m2w64-gmp - - m2w64-libwinpthread-git - - msys2-conda-epoch ==20160418 - license: GPL3+, partial:GCCRLE, partial:LGPL2+ - purls: [] - size: 219240 - timestamp: 1608163481341 -- kind: conda - name: m2w64-gmp - version: 6.1.0 - build: '2' - build_number: 2 - subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2 - sha256: 7e3cd95f554660de45f8323fca359e904e8d203efaf07a4d311e46d611481ed1 - md5: 53a1c73e1e3d185516d7e3af177596d9 - depends: - - msys2-conda-epoch ==20160418 - license: LGPL3 - purls: [] - size: 743501 - timestamp: 1608163782057 -- kind: conda - name: m2w64-libwinpthread-git - version: 5.0.0.4634.697f757 - build: '2' - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 - sha256: f63a09b2cae7defae0480f1740015d6235f1861afa6fe2e2d3e10bd0d1314ee0 - md5: 774130a326dee16f1ceb05cc687ee4f0 - depends: - - msys2-conda-epoch ==20160418 - license: MIT, BSD - purls: [] - size: 31928 - timestamp: 1608166099896 - kind: conda name: mapclassify - version: 2.8.0 + version: 2.8.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.0-pyhd8ed1ab_0.conda - sha256: e24778b8e965ae188ef268c52f4b55e340b6a194db57481f640f7d2b0a6e57a2 - md5: 61730f7e741f2d98441bfa44979f2a33 + url: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda + sha256: ce49505ac5c1d2d0bab6543b057c7cf698b0135ef92cd0eb151a41ea09d24c8c + md5: e75920f936efb86f64517d144d610107 depends: - networkx >=2.7 - numpy >=1.23 @@ -12826,8 +12914,8 @@ packages: license_family: BSD purls: - pkg:pypi/mapclassify?source=hash-mapping - size: 56342 - timestamp: 1723589782579 + size: 58204 + timestamp: 1727220839687 - kind: conda name: markdown-it-py version: 3.0.0 @@ -12848,58 +12936,58 @@ packages: timestamp: 1686175179621 - kind: conda name: markupsafe - version: 2.1.5 - build: py312h4389bb4_1 + version: 3.0.1 + build: py312h178313f_1 build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312h4389bb4_1.conda - sha256: e0445364902a4c0ab45b6683a09459b574466198f4ad81919bae4cd291e75208 - md5: 79843153b0fa98a7e63b9d9ed525596b + subdir: 
linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.1-py312h178313f_1.conda + sha256: d65455297e005c73811848fb3b25a9570d5712c972c7302198ca72698f5f5341 + md5: 490afd4d3627a7f999b9d633c4b6c229 depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - jinja2 >=3.0.0 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 29136 - timestamp: 1724959968176 + size: 24814 + timestamp: 1728489245950 - kind: conda name: markupsafe - version: 2.1.5 - build: py312h66e93f0_1 + version: 3.0.1 + build: py312h31fea79_1 build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h66e93f0_1.conda - sha256: 5c88cd6e19437015de16bde30dd25791aca63ac9cbb8d66b65f365ecff1b235b - md5: 80b79ce0d3dc127e96002dfdcec0a2a5 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.1-py312h31fea79_1.conda + sha256: 128416033353673a4e8094e39134dcc5be5ff6ff05b8eb48467d7bfd18cbe354 + md5: 512dc4954c7fe66ed5c2c622a3eb745a depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - jinja2 >=3.0.0 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 26772 - timestamp: 1724959630484 + size: 27734 + timestamp: 1728489618033 - kind: conda name: markupsafe - version: 2.1.5 - build: py312hb553811_1 + version: 3.0.1 + build: py312hca98d7e_1 build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312hb553811_1.conda - sha256: 2382cc541f3bbe912180861754aceb2ed180004e361a7c66ac2b1a71a7c2fba8 - md5: 2b9fc64d656299475c648d7508e14943 + url: 
https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.1-py312hca98d7e_1.conda + sha256: 848e8be9b74d20a90c120f0f3332df5baad07514edab44e4e8e561b89fbf261b + md5: 136bab776f3fb3ccc6e11f95cf71e658 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -12910,16 +12998,17 @@ packages: license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 25414 - timestamp: 1724959688117 + size: 23791 + timestamp: 1728489239217 - kind: conda name: matplotlib version: 3.9.2 - build: py312h2e8e312_0 + build: py312h2e8e312_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_0.conda - sha256: 2ad3b3e80dd897efa7c04c614399baf4f79a08ade8d8e514b0866727d9d03056 - md5: f87a4701dbbf6dcdb559f2dc5b3b0b95 + url: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_1.conda + sha256: b83f0ab8024cb392f56c61427d8ca05a09ea2be2e7bd47870984e99322cb5ec5 + md5: a27a47ecb8ad494b3edd0746b9dcb362 depends: - matplotlib-base >=3.9.2,<3.9.3.0a0 - pyside6 >=6.7.2 @@ -12929,16 +13018,17 @@ packages: license: PSF-2.0 license_family: PSF purls: [] - size: 9152 - timestamp: 1723760944640 + size: 9231 + timestamp: 1726165963481 - kind: conda name: matplotlib version: 3.9.2 - build: py312h7900ff3_0 + build: py312h7900ff3_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_0.conda - sha256: b728fe3bb3525fc2a2d37b81e5fee1c697fa6ce380da8c1dbd4378ff0a3bc299 - md5: 44c07eccf73f549b8ea5c9aacfe3ad0a + url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_1.conda + sha256: 36eba5fde11962133b469c4121d83e26fba48654ee8f5753e5ffaf36d8631c47 + md5: 07d5646ea9f22f4b1c46c2947d1b2f58 depends: - matplotlib-base >=3.9.2,<3.9.3.0a0 - pyside6 >=6.7.2 @@ -12948,16 +13038,17 @@ packages: license: PSF-2.0 license_family: PSF purls: [] - size: 8747 - timestamp: 1723759696471 + size: 8821 + timestamp: 1726164949072 - kind: conda 
name: matplotlib version: 3.9.2 - build: py312hb401068_0 + build: py312hb401068_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_0.conda - sha256: 1a3a285255fdb62dbd58df5426910071d47afdca8608a9f22c21dd74b8d1b308 - md5: f468fd4f10632ff2500482118a3d4ace + url: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_1.conda + sha256: 91866c86a6e5609a132902077b6d1dc322a1bba7dd85dcea4d0bbfbdf5748437 + md5: 522402426e34fce47653fd99ffc40a22 depends: - matplotlib-base >=3.9.2,<3.9.3.0a0 - python >=3.12,<3.13.0a0 @@ -12966,16 +13057,17 @@ packages: license: PSF-2.0 license_family: PSF purls: [] - size: 8799 - timestamp: 1723759810727 + size: 8847 + timestamp: 1726165120341 - kind: conda name: matplotlib-base version: 3.9.2 - build: py312h0d5aeb7_0 + build: py312h30cc4df_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h0d5aeb7_0.conda - sha256: e84ce46cad067c84d737229f642613734c69d665dd7b6f3997aaa3586d2da41c - md5: 0c73a08429d20f15fa8b28083ec04cc9 + url: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h30cc4df_1.conda + sha256: 2f8f222cebd8c5aa3d3878496bdfb976acedf7aad0cf4abce1c919d03b57c7ee + md5: 0cca3ae643d5cbfe380fda45bd55e001 depends: - __osx >=10.13 - certifi >=2020.06.20 @@ -12984,7 +13076,7 @@ packages: - fonttools >=4.22.0 - freetype >=2.12.1,<3.0a0 - kiwisolver >=1.3.1 - - libcxx >=16 + - libcxx >=17 - numpy >=1.19,<3 - numpy >=1.23 - packaging >=20.0 @@ -12998,26 +13090,24 @@ packages: license_family: PSF purls: - pkg:pypi/matplotlib?source=hash-mapping - size: 7905104 - timestamp: 1723759753087 + size: 7678288 + timestamp: 1726165095191 - kind: conda name: matplotlib-base version: 3.9.2 - build: py312h854627b_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312h854627b_0.conda - sha256: 
ae075b97ce43439a7a914bf478564927a3dfe00724fb69555947cc3bae737a11 - md5: a57b0ae7c0aac603839a4e83a3e997d6 + build: py312h90004f6_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_1.conda + sha256: ee7d8321d254082f6531b4a8437272cbacac7e76e51dd2b25378be4ba379fbc1 + md5: cb6fe391da87c2fe0a0566ea3d9b0a0c depends: - - __glibc >=2.17,<3.0.a0 - certifi >=2020.06.20 - contourpy >=1.0.1 - cycler >=0.10 - fonttools >=4.22.0 - freetype >=2.12.1,<3.0a0 - kiwisolver >=1.3.1 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - numpy >=1.19,<3 - numpy >=1.23 - packaging >=20.0 @@ -13027,28 +13117,34 @@ packages: - python-dateutil >=2.7 - python_abi 3.12.* *_cp312 - qhull >=2020.2,<2020.3.0a0 - - tk >=8.6.13,<8.7.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: PSF-2.0 license_family: PSF purls: - pkg:pypi/matplotlib?source=hash-mapping - size: 7904910 - timestamp: 1723759675614 + size: 7803682 + timestamp: 1726165916612 - kind: conda name: matplotlib-base version: 3.9.2 - build: py312h90004f6_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_0.conda - sha256: 50db6571737853422b164f2de6afb60d202043c078c8e52939396e5ea65a494f - md5: f1422f097083503f11d336f155748b73 + build: py312hd3ec401_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_1.conda + sha256: 3efd50d9b7b0f1b30611585810d4ae7566d7c860c101f47ec9372f6d4a80d040 + md5: 2f4f3854f23be30de29e9e4d39758349 depends: + - __glibc >=2.17,<3.0.a0 - certifi >=2020.06.20 - contourpy >=1.0.1 - cycler >=0.10 - fonttools >=4.22.0 - freetype >=2.12.1,<3.0a0 - kiwisolver >=1.3.1 + - libgcc >=13 + - libstdcxx >=13 - numpy >=1.19,<3 - numpy >=1.23 - packaging >=20.0 @@ -13058,15 +13154,13 @@ packages: - python-dateutil >=2.7 - python_abi 3.12.* *_cp312 - qhull >=2020.2,<2020.3.0a0 - - ucrt >=10.0.20348.0 - - 
vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - tk >=8.6.13,<8.7.0a0 license: PSF-2.0 license_family: PSF purls: - pkg:pypi/matplotlib?source=hash-mapping - size: 7747918 - timestamp: 1723760858160 + size: 7892651 + timestamp: 1726164930325 - kind: conda name: matplotlib-inline version: 0.1.7 @@ -13279,18 +13373,6 @@ packages: - pkg:pypi/msgpack?source=hash-mapping size: 88169 timestamp: 1725975418157 -- kind: conda - name: msys2-conda-epoch - version: '20160418' - build: '1' - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2 - sha256: 99358d58d778abee4dca82ad29fb58058571f19b0f86138363c260049d4ac7f1 - md5: b0309b72560df66f71a9d5e34a5efdfa - purls: [] - size: 3227 - timestamp: 1608166968312 - kind: conda name: multimethod version: 1.9.1 @@ -13411,42 +13493,44 @@ packages: - kind: conda name: mysql-common version: 9.0.1 - build: h70512c7_0 + build: h266115a_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h70512c7_0.conda - sha256: 4417ba9daf1f818e62e399dc9ab33fcd12741d79d19db0884394cc9c766ae78d - md5: c567b6fa201bc424e84f1e70f7a36095 + url: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda + sha256: f77130a529afa61fde755ae60b6d71df20c20c866a9ad75709107cf63a9f777c + md5: e97f73d51b5acdf1340a15b195738f16 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - openssl >=3.3.1,<4.0a0 + - libgcc >=13 + - libstdcxx >=13 + - openssl >=3.3.2,<4.0a0 license: GPL-2.0-or-later license_family: GPL purls: [] - size: 612947 - timestamp: 1723209940114 + size: 640042 + timestamp: 1727340440162 - kind: conda name: mysql-libs version: 9.0.1 - build: ha479ceb_0 + build: he0572af_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-ha479ceb_0.conda - sha256: f4bea852a48a2168d2bdb73c9be6e3d0ba30525a7e4f0472e899a0773206a8a9 - md5: 
6fd406aef37faad86bd7f37a94fb6f8a + url: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda + sha256: b1c95888b3b900f5dd45446d9addb60c64bd0ea6547eb074624892c36634701c + md5: 274f367df5d56f152a49ed3203c3b1c1 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - libgcc >=13 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - - mysql-common 9.0.1 h70512c7_0 - - openssl >=3.3.1,<4.0a0 + - mysql-common 9.0.1 h266115a_1 + - openssl >=3.3.2,<4.0a0 - zstd >=1.5.6,<1.6.0a0 license: GPL-2.0-or-later license_family: GPL purls: [] - size: 1368619 - timestamp: 1723210027997 + size: 1368648 + timestamp: 1727340508054 - kind: conda name: nbclient version: 0.10.0 @@ -13666,43 +13750,43 @@ packages: timestamp: 1669785313586 - kind: conda name: nss - version: '3.104' + version: '3.105' build: h3135457_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/nss-3.104-h3135457_0.conda - sha256: a5b3fe0367a39edfac92e2cd69426123049257cb6aedf9bba002ea45c70fcdfc - md5: 8cf0f6f72197a4fb10ccb897b30f1731 + url: https://conda.anaconda.org/conda-forge/osx-64/nss-3.105-h3135457_0.conda + sha256: d74a19e474bf2e4da23036ca1edb02cd5a1c19bdf23403f36a1700de65f8b4e4 + md5: 8717a72a9245328f588385c2569dff57 depends: - __osx >=10.13 - libcxx >=17 - - libsqlite >=3.46.0,<4.0a0 + - libsqlite >=3.46.1,<4.0a0 - libzlib >=1.3.1,<2.0a0 - nspr >=4.35,<5.0a0 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 1859778 - timestamp: 1725079369298 + size: 1902027 + timestamp: 1727393001729 - kind: conda name: nss - version: '3.104' + version: '3.105' build: hd34e28f_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda - sha256: 0beb64ae310a34537c41e43110ebc24352c4319e6348cebe3d8a89b02382212c - md5: 0664e59f6937a660eba9f3d2f9123fa8 + url: https://conda.anaconda.org/conda-forge/linux-64/nss-3.105-hd34e28f_0.conda + sha256: 4888112f00f46490169e60cd2455af78e53d67d6ca70eb8c4e203d6e990bcfd0 + md5: 
28d7602527b76052422aaf5d6fd7ad81 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - - libsqlite >=3.46.0,<4.0a0 + - libsqlite >=3.46.1,<4.0a0 - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - nspr >=4.35,<5.0a0 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 1979701 - timestamp: 1725079209552 + size: 2001454 + timestamp: 1727392742253 - kind: conda name: numba version: 0.60.0 @@ -13796,13 +13880,13 @@ packages: timestamp: 1718888811663 - kind: conda name: numba_celltree - version: 0.2.0 + version: 0.2.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.0-pyhd8ed1ab_0.conda - sha256: 8ef116fc2af9d70afe8123cb4157f5efa55cfd042fd1ef36cad9aab65b36ca5a - md5: e2ed9d4ac5f28671045cd33b2269969a + url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda + sha256: 84d3c63a3a58c2039a50738be391b3401e148b5a39911a191acbffc680d3313a + md5: bcc32246fc51d0ca5893fea58314d630 depends: - numba >=0.50 - numpy @@ -13811,8 +13895,8 @@ packages: license_family: MIT purls: - pkg:pypi/numba-celltree?source=hash-mapping - size: 33566 - timestamp: 1724401764094 + size: 33709 + timestamp: 1727983962954 - kind: conda name: numpy version: 2.0.2 @@ -13920,7 +14004,7 @@ packages: md5: 7e7099ad94ac3b599808950cec30ad4e depends: - libpng >=1.6.43,<1.7.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.2.13,<2.0.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -13942,7 +14026,7 @@ packages: - libgcc-ng >=12 - libpng >=1.6.43,<1.7.0a0 - libstdcxx-ng >=12 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.2.13,<2.0.0a0 license: BSD-2-Clause license_family: BSD @@ -13960,13 +14044,51 @@ packages: depends: - libcxx >=16 - libpng >=1.6.43,<1.7.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.2.13,<2.0.0a0 license: BSD-2-Clause license_family: BSD purls: [] size: 331273 timestamp: 1709159538792 +- kind: conda + name: openldap + 
version: 2.6.8 + build: hcd2896d_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/openldap-2.6.8-hcd2896d_0.conda + sha256: fa7d6a2f276732ad15d8e7dcb3a9631aa873c63fece3ac3cb2e0320a6badd870 + md5: 91279e088f7903edc3c101b268436529 + depends: + - __osx >=10.13 + - cyrus-sasl >=2.1.27,<3.0a0 + - krb5 >=1.21.2,<1.22.0a0 + - libcxx >=16 + - openssl >=3.3.0,<4.0a0 + license: OLDAP-2.8 + license_family: BSD + purls: [] + size: 777179 + timestamp: 1716378065016 +- kind: conda + name: openldap + version: 2.6.8 + build: hedd0468_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda + sha256: 902652f7a106caa6ea9db2c44118078e23a499bf091ce8ea01d8498c156e8219 + md5: dcd0ed5147d8876b0848a552b416ce76 + depends: + - cyrus-sasl >=2.1.27,<3.0a0 + - krb5 >=1.21.2,<1.22.0a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 + - openssl >=3.3.0,<4.0a0 + license: OLDAP-2.8 + license_family: BSD + purls: [] + size: 780492 + timestamp: 1716377814828 - kind: conda name: openpyxl version: 3.1.5 @@ -14082,38 +14204,40 @@ packages: - kind: conda name: orc version: 2.0.2 - build: h22b2039_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h22b2039_0.conda - sha256: b5a0667937d9d2d8d50e624e67fdc54c898a33013cd3a6fada343f3c4e69ae6e - md5: f7c6463d97edb79a39df8e5e90c53b1b + build: h1c5a4bf_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h1c5a4bf_1.conda + sha256: 08274ce3433d35c03da8ccc00f8908ed37af9e24d16c5c7befbc3eaf135add04 + md5: 524025f3ad525a28d11044d8991c5e98 depends: - - __osx >=10.13 - - libcxx >=16 - - libprotobuf >=4.25.3,<4.25.4.0a0 + - libprotobuf >=5.27.5,<5.27.6.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - snappy >=1.2.1,<1.3.0a0 - tzdata + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.6.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 466353 - timestamp: 
1723760915178 + size: 895548 + timestamp: 1727242629823 - kind: conda name: orc version: 2.0.2 - build: h669347b_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda - sha256: 8a126e0be7f87c499f0a9b5229efa4321e60fc4ae46abdec9b13240631cb1746 - md5: 1e6c10f7d749a490612404efeb179eb8 + build: h52ea4d3_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/orc-2.0.2-h52ea4d3_1.conda + sha256: 9004a65831743a3a52cc74312d454fb52d8a37141188f3a96f29d33e58215047 + md5: c217341f1416bab5d027e776981dccf4 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - libprotobuf >=4.25.3,<4.25.4.0a0 - - libstdcxx-ng >=12 + - __osx >=10.13 + - libcxx >=17 + - libprotobuf >=5.27.5,<5.27.6.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - snappy >=1.2.1,<1.3.0a0 @@ -14122,31 +14246,32 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 1066349 - timestamp: 1723760593232 + size: 475535 + timestamp: 1727242441383 - kind: conda name: orc version: 2.0.2 - build: h784c2ca_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/orc-2.0.2-h784c2ca_0.conda - sha256: f083c8f49430ca80b6d8a776c37bc1021075dc5f826527c44a85f90607a5c652 - md5: dbb01d6e4f992ea4f0dcb049ab926cc7 + build: h690cf93_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h690cf93_1.conda + sha256: ce023f259ffd93b4678cc582fc4b15a8a991a7b8edd9def8b6838bf7e7962bec + md5: 0044701dd48af57d3d5467a704ef9ebd depends: - - libprotobuf >=4.25.3,<4.25.4.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libprotobuf >=5.27.5,<5.27.6.0a0 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - snappy >=1.2.1,<1.3.0a0 - tzdata - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.6.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 999325 - timestamp: 1723761049521 + size: 1184634 + timestamp: 
1727242386732 - kind: conda name: overrides version: 7.7.0 @@ -14184,86 +14309,90 @@ packages: timestamp: 1718189540074 - kind: conda name: pandas - version: 2.2.2 - build: py312h1171441_1 + version: 2.2.3 + build: py312h72972c8_1 build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.2-py312h1171441_1.conda - sha256: 99ef3986a0c6a5fe31a94b298f3ef60eb7ec7aa683a9aee6682f97d003aeb423 - md5: 240737937f1f046b0e03ecc11ac4ec98 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.3-py312h72972c8_1.conda + sha256: dfd30e665b1ced1b783ca303799e250d8acc40943bcefb3a9b2bb13c3b17911c + md5: bf6f01c03e0688523d4b5cff8fe8c977 depends: - - __osx >=10.13 - - libcxx >=16 - numpy >=1.19,<3 + - numpy >=1.22.4 - python >=3.12,<3.13.0a0 - python-dateutil >=2.8.1 - python-tzdata >=2022a - python_abi 3.12.* *_cp312 - - pytz >=2020.1 + - pytz >=2020.1,<2024.2 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/pandas?source=hash-mapping - size: 14673730 - timestamp: 1715898164799 + size: 14218658 + timestamp: 1726879426348 - kind: conda name: pandas - version: 2.2.2 - build: py312h1d6d2e6_1 + version: 2.2.3 + build: py312h98e817e_1 build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.2-py312h1d6d2e6_1.conda - sha256: 80fd53b68aa89b929d03874b99621ec8cc6a12629bd8bfbdca87a95f8852af96 - md5: ae00b61f3000d2284d1f2584d4dfafa8 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/pandas-2.2.3-py312h98e817e_1.conda + sha256: 86c252ce5718b55129303f7d5c9a8664d8f0b23e303579142d09fcfd701e4fbe + md5: a7f7c58bbbfcdf820edb6e544555fe8f depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 + - __osx >=10.13 + - libcxx >=17 - numpy >=1.19,<3 + - numpy >=1.22.4 - python >=3.12,<3.13.0a0 - python-dateutil >=2.8.1 - python-tzdata >=2022a - python_abi 3.12.* *_cp312 - - pytz >=2020.1 + - pytz 
>=2020.1,<2024.2 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/pandas?source=hash-mapping - size: 15458981 - timestamp: 1715898284697 + size: 14575645 + timestamp: 1726879062042 - kind: conda name: pandas - version: 2.2.2 - build: py312h72972c8_1 + version: 2.2.3 + build: py312hf9745cd_1 build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.2-py312h72972c8_1.conda - sha256: f27b950c52cac5784b184a258c599cea81fcbfbd688897da799de4b6bf91af6e - md5: 92a5cf9f4778c6c9e02582d99885b34d + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda + sha256: ad275a83bfebfa8a8fee9b0569aaf6f513ada6a246b2f5d5b85903d8ca61887e + md5: 8bce4f6caaf8c5448c7ac86d87e26b4b depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 - numpy >=1.19,<3 + - numpy >=1.22.4 - python >=3.12,<3.13.0a0 - python-dateutil >=2.8.1 - python-tzdata >=2022a - python_abi 3.12.* *_cp312 - - pytz >=2020.1 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - pytz >=2020.1,<2024.2 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/pandas?source=hash-mapping - size: 14181121 - timestamp: 1715899159343 + size: 15436913 + timestamp: 1726879054912 - kind: conda name: pandas-stubs - version: 2.2.2.240909 + version: 2.2.3.241009 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240909-pyhd8ed1ab_0.conda - sha256: 1cf735133060d2c687bc556824709f596e16b0d992fff8c3d16bf6fda7cb92ae - md5: 5139243ed81869ac257a910995650f36 + url: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.3.241009-pyhd8ed1ab_0.conda + sha256: c89528fab4e9db70b6b9e59ec18dbca1d97c3635f03a5d8c63d461098777f049 + md5: a533ed91fb5f1cb8e2b2b1907e1a576c depends: - numpy >=1.26.0 - python >=3.9 @@ -14271,8 +14400,8 @@ packages: license: BSD-3-Clause purls: - pkg:pypi/pandas-stubs?source=hash-mapping - size: 98489 - timestamp: 
1725975727109 + size: 98654 + timestamp: 1728505724880 - kind: conda name: pandera version: 0.20.4 @@ -14526,9 +14655,9 @@ packages: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.2,<3.0a0 - python >=3.12,<3.13.0a0 @@ -14557,9 +14686,9 @@ packages: - lcms2 >=2.16,<3.0a0 - libgcc >=13 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.2,<3.0a0 - python >=3.12,<3.13.0a0 @@ -14584,9 +14713,9 @@ packages: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.2,<3.0a0 - python >=3.12,<3.13.0a0 @@ -14684,21 +14813,21 @@ packages: timestamp: 1694617398467 - kind: conda name: platformdirs - version: 4.3.2 + version: 4.3.6 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda - sha256: 3aef5bb863a2db94e47272fd5ec5a5e4b240eafba79ebb9df7a162797cf035a3 - md5: e1a2dfcd5695f0744f1bcd3bbfe02523 + url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda + sha256: c81bdeadc4adcda216b2c7b373f0335f5c78cc480d1d55d10f21823590d7e46f + md5: fd8f2b18b65bbf62e8f653100690c8d2 depends: - python >=3.8 license: MIT license_family: MIT purls: - pkg:pypi/platformdirs?source=hash-mapping - size: 20623 - timestamp: 1725821846879 + size: 20625 + timestamp: 1726613611845 - kind: conda name: pluggy version: 1.5.0 @@ -14778,7 +14907,7 @@ packages: - libjpeg-turbo >=3.0.0,<4.0a0 - libpng 
>=1.6.43,<1.7.0a0 - libstdcxx-ng >=13 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.3.1,<2.0a0 - nspr >=4.35,<5.0a0 - nss >=3.103,<4.0a0 @@ -14812,7 +14941,7 @@ packages: - libintl >=0.22.5,<1.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libpng >=1.6.43,<1.7.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.3.1,<2.0a0 - nspr >=4.35,<5.0a0 - nss >=3.103,<4.0a0 @@ -14842,7 +14971,7 @@ packages: - libintl >=0.22.5,<1.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - libpng >=1.6.43,<1.7.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libtiff >=4.6.0,<4.8.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.2,<3.0a0 - poppler-data @@ -14870,83 +14999,96 @@ packages: timestamp: 1675353652214 - kind: conda name: postgresql - version: '16.4' - build: h4b98a8f_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/postgresql-16.4-h4b98a8f_1.conda - sha256: 2399f6b2eea2af0bd37a6c71fe9055a83248fbbd438cde14d3057dabff39a279 - md5: 1286c495eb0b5817270acdf5b4144b03 + version: '17.0' + build: h1122569_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_2.conda + sha256: dcc5bc7cfda64d43541f2b22542df4d83f0c55790c982bae5349cec76e824bd9 + md5: ce0728ee5b5a1a7b06d947cbfd72aab0 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 + - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - libpq 16.4 h75a757a_1 + - libgcc >=13 + - libpq 17.0 h04577a9_2 - libxml2 >=2.12.7,<3.0a0 + - libxslt >=1.1.39,<2.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - lz4-c >=1.9.3,<1.10.0a0 + - openldap >=2.6.8,<2.7.0a0 + - openssl >=3.3.2,<4.0a0 - readline >=8.2,<9.0a0 - tzcode - tzdata + - zstd >=1.5.6,<1.6.0a0 license: PostgreSQL purls: [] - size: 4593109 - timestamp: 1724948725869 + size: 5537219 + timestamp: 1727852846436 - kind: conda name: postgresql - version: '16.4' - build: hb2eb5c0_1 - build_number: 1 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda - sha256: 7b6c307722ff7acaa26f04a19c124b5548e16a8097576709d911ef7123e2fbaf - md5: 1aaec5dbae29b3f0a2c20eeb84e9e38a + version: '17.0' + build: h13864a0_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h13864a0_2.conda + sha256: 5d47afd1c0aa5b24965f41c5edb0ba65787891b58bc22addeec3cfa6a8c21c9f + md5: 88defb9d344f0d39805f8c819f211f79 depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 + - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - libgcc >=13 - - libpq 16.4 h2d7952a_1 + - libpq 17.0 h6e894e8_2 - libxml2 >=2.12.7,<3.0a0 + - libxslt >=1.1.39,<2.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - lz4-c >=1.9.3,<1.10.0a0 + - openldap >=2.6.8,<2.7.0a0 + - openssl >=3.3.2,<4.0a0 - readline >=8.2,<9.0a0 - tzcode - tzdata + - zstd >=1.5.6,<1.6.0a0 license: PostgreSQL purls: [] - size: 5323539 - timestamp: 1724948476169 + size: 4823803 + timestamp: 1727853241659 - kind: conda name: postgresql - version: '16.4' - build: hd835ec0_1 - build_number: 1 + version: '17.0' + build: heca7946_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/postgresql-16.4-hd835ec0_1.conda - sha256: dfbb2a5979e904d48482a68197227905ef99e4e2e290c989cf07ca7283273a94 - md5: 9dc48367ebcf896988c7df870ec6cba3 + url: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_2.conda + sha256: 54c66acd351fae8aee1ae718c7474b212f9cb3181c492377b4aa632e174758e2 + md5: 6cca6d21f8c09ff8d860d02464f9b0fd depends: + - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - libpq 16.4 hab9416b_1 + - libpq 17.0 h7ec079e_2 - libxml2 >=2.12.7,<3.0a0 + - libxslt >=1.1.39,<2.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - lz4-c >=1.9.3,<1.10.0a0 + - openssl >=3.3.2,<4.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 + - zstd >=1.5.6,<1.6.0a0 license: PostgreSQL purls: [] - size: 18666036 - timestamp: 
1724949389398 + size: 4317261 + timestamp: 1727853708062 - kind: conda name: pre-commit - version: 3.8.0 - build: pyha770c72_1 - build_number: 1 + version: 4.0.1 + build: pyha770c72_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda - sha256: c2b964c86b2cd00e494093d751b1f8697b3c4bf924ff70648387af161444cc82 - md5: 004cff3a7f6fafb0a041fb575de85185 + url: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda + sha256: 2490b18ec802d8f085f2de8298a3d275451f7db17769353080dfb121fe386675 + md5: 5971cc64048943605f352f7f8612de6c depends: - cfgv >=2.0.0 - identify >=1.0.0 @@ -14958,116 +15100,113 @@ packages: license_family: MIT purls: - pkg:pypi/pre-commit?source=hash-mapping - size: 180526 - timestamp: 1725795837882 + size: 194633 + timestamp: 1728420305558 - kind: conda name: proj - version: 9.4.1 - build: h54d7996_1 - build_number: 1 + version: 9.5.0 + build: h12925eb_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda - sha256: 7e5aa324f89eece539001daa8df802d1b5851caee4be41b99ffe3b6e168993a9 - md5: e479d1991c725e1a355f33c0e40dbc66 + url: https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda + sha256: 936de8754054d97223e87cc87b72641d2c7582d536ee9eee4b0443fa66e2733f + md5: 8c29983ebe50cc7e0998c34bc7614222 depends: - __glibc >=2.17,<3.0.a0 - - libcurl >=8.9.0,<9.0a0 - - libgcc-ng >=12 - - libsqlite >=3.46.0,<4.0a0 - - libstdcxx-ng >=12 - - libtiff >=4.6.0,<4.7.0a0 + - libcurl >=8.10.0,<9.0a0 + - libgcc >=13 + - libsqlite >=3.46.1,<4.0a0 + - libstdcxx >=13 + - libtiff >=4.6.0,<4.8.0a0 - sqlite constrains: - proj4 ==999999999999 license: MIT license_family: MIT purls: [] - size: 3050689 - timestamp: 1722327846022 + size: 3093445 + timestamp: 1726489083290 - kind: conda name: proj - version: 9.4.1 - build: hd9569ee_1 - build_number: 1 - subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/proj-9.4.1-hd9569ee_1.conda - sha256: cde60f7c07598fd183a90f2725f5b7f3028a382a163f4efcb8b52dcfbb798d03 - md5: 6e15f5054b179959d2410c2e53d5a3e4 + version: 9.5.0 + build: h70d2bda_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/proj-9.5.0-h70d2bda_0.conda + sha256: 9530508868971b9866486c6cb370a18ca97d6960ccb010f9ca0eaeb539b16910 + md5: bc2d54e486a633b5f6c3f18c1fe734fb depends: - - libcurl >=8.9.0,<9.0a0 - - libsqlite >=3.46.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - __osx >=10.13 + - libcurl >=8.10.0,<9.0a0 + - libcxx >=17 + - libsqlite >=3.46.1,<4.0a0 + - libtiff >=4.6.0,<4.8.0a0 - sqlite - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - proj4 ==999999999999 license: MIT license_family: MIT purls: [] - size: 2726576 - timestamp: 1722328352769 + size: 2790379 + timestamp: 1726489327240 - kind: conda name: proj - version: 9.4.1 - build: hf92c781_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/proj-9.4.1-hf92c781_1.conda - sha256: 826e1fcd191d17a6f16c745779254265e4cf1cdbd1761e627e3cdf0b9d6ed487 - md5: edf9f0581ffc0f50a1159943be5d0729 + version: 9.5.0 + build: hd9569ee_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/proj-9.5.0-hd9569ee_0.conda + sha256: ebd1fee2834cf5971a08dfb665606f775302aa22e98d5d893d35323805311419 + md5: 4cfbffd1cd2bbff30e975a71b1769597 depends: - - __osx >=10.13 - - libcurl >=8.9.0,<9.0a0 - - libcxx >=16 - - libsqlite >=3.46.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libcurl >=8.10.0,<9.0a0 + - libsqlite >=3.46.1,<4.0a0 + - libtiff >=4.6.0,<4.8.0a0 - sqlite + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - proj4 ==999999999999 license: MIT license_family: MIT purls: [] - size: 2831538 - timestamp: 1722327962605 + size: 2709612 + timestamp: 1726489723807 - kind: conda name: prometheus_client - version: 0.20.0 + version: 0.21.0 build: pyhd8ed1ab_0 
subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.20.0-pyhd8ed1ab_0.conda - sha256: 757cd91d01c2e0b64fadf6bc9a11f558cf7638d897dfbaf7415ddf324d5405c9 - md5: 9a19b94034dd3abb2b348c8b93388035 + url: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + sha256: 01f0c3dd00081637ed920a922b17bcc8ed49608404ee466ced806856e671f6b9 + md5: 07e9550ddff45150bfc7da146268e165 depends: - python >=3.8 license: Apache-2.0 license_family: Apache purls: - pkg:pypi/prometheus-client?source=hash-mapping - size: 48913 - timestamp: 1707932844383 + size: 49024 + timestamp: 1726902073034 - kind: conda name: prompt-toolkit - version: 3.0.47 + version: 3.0.48 build: pyha770c72_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda - sha256: d93ac5853e398aaa10f0dd7addd64b411f94ace1f9104d619cd250e19a5ac5b4 - md5: 1247c861065d227781231950e14fe817 + url: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda + sha256: 44e4e6108d425a666856a52d1523e5d70890256a8920bb0dcd3d55cc750f3207 + md5: 4c05134c48b6a74f33bbb9938e4a115e depends: - python >=3.7 - wcwidth constrains: - - prompt_toolkit 3.0.47 + - prompt_toolkit 3.0.48 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/prompt-toolkit?source=hash-mapping - size: 270710 - timestamp: 1718048095491 + size: 270271 + timestamp: 1727341744544 - kind: conda name: psutil version: 6.0.0 @@ -15131,64 +15270,54 @@ packages: - kind: conda name: pthread-stubs version: '0.4' - build: h36c2ea0_1001 - build_number: 1001 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 - sha256: 67c84822f87b641d89df09758da498b2d4558d47b920fd1d3fe6d3a871e000ff - md5: 22dad4df6e8630e8dff2428f6f6a7036 + build: h00291cd_1002 + build_number: 1002 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-h00291cd_1002.conda + sha256: 05944ca3445f31614f8c674c560bca02ff05cb51637a96f665cb2bbe496099e5 + md5: 8bcf980d2c6b17094961198284b8e862 depends: - - libgcc-ng >=7.5.0 + - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 5625 - timestamp: 1606147468727 + size: 8364 + timestamp: 1726802331537 - kind: conda name: pthread-stubs version: '0.4' - build: hc929b4f_1001 - build_number: 1001 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2 - sha256: 6e3900bb241bcdec513d4e7180fe9a19186c1a38f0b4080ed619d26014222c53 - md5: addd19059de62181cd11ae8f4ef26084 + build: h0e40799_1002 + build_number: 1002 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-h0e40799_1002.conda + sha256: 7e446bafb4d692792310ed022fe284e848c6a868c861655a92435af7368bae7b + md5: 3c8f2573569bb816483e5cf57efbbe29 + depends: + - libgcc >=13 + - libwinpthread >=12.0.0.r4.gg4f2fc60ca + - ucrt >=10.0.20348.0 license: MIT license_family: MIT purls: [] - size: 5653 - timestamp: 1606147699844 + size: 9389 + timestamp: 1726802555076 - kind: conda name: pthread-stubs version: '0.4' - build: hcd874cb_1001 - build_number: 1001 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2 - sha256: bb5a6ddf1a609a63addd6d7b488b0f58d05092ea84e9203283409bff539e202a - md5: a1f820480193ea83582b13249a7e7bd9 + build: hb9d3cd8_1002 + build_number: 1002 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda + sha256: 9c88f8c64590e9567c6c80823f0328e58d3b1efb0e1c539c0315ceca764e0973 + md5: b3c17d95b5a10c6e64a21fa17573e70e depends: - - m2w64-gcc-libs + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 6417 - timestamp: 1606147814351 -- kind: conda - name: pthreads-win32 - version: 2.9.1 - build: hfa6e2cd_3 - 
build_number: 3 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pthreads-win32-2.9.1-hfa6e2cd_3.tar.bz2 - sha256: 576a228630a72f25d255a5e345e5f10878e153221a96560f2498040cd6f54005 - md5: e2da8758d7d51ff6aa78a14dfb9dbed4 - depends: - - vc 14.* - license: LGPL 2 - purls: [] - size: 144301 - timestamp: 1537755684331 + size: 8252 + timestamp: 1726802366959 - kind: conda name: ptyprocess version: 0.7.0 @@ -15405,32 +15534,32 @@ packages: timestamp: 1711811634025 - kind: conda name: pydantic - version: 2.9.1 + version: 2.9.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.1-pyhd8ed1ab_0.conda - sha256: 9064ec63d676d83452a6a07cb92d95ebfa02b5016841956ce55e324c45e012ee - md5: 5309e66d385d7367364e838764ad2ac4 + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + sha256: 1b7b0dc9f6af4da156bf22b0263be70829364a08145c696d3670facff2f6441a + md5: 1eb533bb8eb2199e3fef3e4aa147319f depends: - annotated-types >=0.6.0 - - pydantic-core 2.23.3 + - pydantic-core 2.23.4 - python >=3.7 - typing-extensions >=4.6.1 license: MIT license_family: MIT purls: - pkg:pypi/pydantic?source=hash-mapping - size: 300605 - timestamp: 1725908662611 + size: 300649 + timestamp: 1726601202431 - kind: conda name: pydantic-core - version: 2.23.3 + version: 2.23.4 build: py312h12e396e_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.3-py312h12e396e_0.conda - sha256: 1894e49665d343cbb5c2ae54107f2bf9077f481cdf6df40e851d14347bd9e07c - md5: 4052762306d758de4d61c7cc71edfe2b + url: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.4-py312h12e396e_0.conda + sha256: 365fde689865087b2a9da636f36678bd59617b324ce7a538b4806e90602b20f1 + md5: 0845ab52d4ea209049129a6a91bc74ba depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -15443,16 +15572,16 @@ packages: license_family: MIT purls: - pkg:pypi/pydantic-core?source=hash-mapping - size: 1615644 
- timestamp: 1725735931378 + size: 1611784 + timestamp: 1726525286507 - kind: conda name: pydantic-core - version: 2.23.3 + version: 2.23.4 build: py312h2615798_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.3-py312h2615798_0.conda - sha256: 74904399f23430b6d9d8b2eacb3e12257a2c2ab61c1d324e0206517c90c71cb9 - md5: 2e7eac3f63413617bc6f4e7018305707 + url: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.4-py312h2615798_0.conda + sha256: cda5f2ea2fb8b1aa91b744aadec269ec3060832106242873639df205258aac62 + md5: 94169f56c3ad3d070248c73f71371944 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -15464,16 +15593,16 @@ packages: license_family: MIT purls: - pkg:pypi/pydantic-core?source=hash-mapping - size: 1569350 - timestamp: 1725736732484 + size: 1566768 + timestamp: 1726526372511 - kind: conda name: pydantic-core - version: 2.23.3 + version: 2.23.4 build: py312h669792a_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.3-py312h669792a_0.conda - sha256: 38f7ef2eb082a75cafbcc37d05e285780858dfff64004d80afdd03a04448a88b - md5: 6599b550ea3dae7abbeda4f44e78750c + url: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.4-py312h669792a_0.conda + sha256: 46b17406772d7403ce454c1005e493a2723a189403dd2a70a3566ac4b1f82a4a + md5: 14806afd8ed78812d83e8b9ea4b549c0 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -15485,8 +15614,8 @@ packages: license_family: MIT purls: - pkg:pypi/pydantic-core?source=hash-mapping - size: 1535653 - timestamp: 1725736002889 + size: 1535259 + timestamp: 1726525537029 - kind: conda name: pygments version: 2.18.0 @@ -15548,44 +15677,38 @@ packages: timestamp: 1725875154490 - kind: conda name: pyogrio - version: 0.9.0 - build: py312h5aa26c2_2 - build_number: 2 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.9.0-py312h5aa26c2_2.conda - sha256: 
e9682d1664e09c97f536060696896f6c0e9e84914a635351d406da836140266d - md5: 8b4325775ed711941bbf6b8c5ad2b5e9 + version: 0.10.0 + build: py312h8705084_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.10.0-py312h8705084_0.conda + sha256: 3044e7d1fcdf30159ef7ff3e5f1f648004e3f46bc522aac0190141025d55ee04 + md5: 90249b9c726133a702cddb06c1d9dab9 depends: - - __glibc >=2.17,<3.0.a0 - - gdal - - libgcc >=13 - - libgdal >=3.9.2,<3.10.0a0 - libgdal-core >=3.9.2,<3.10.0a0 - - libstdcxx >=13 - numpy - packaging - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/pyogrio?source=hash-mapping - size: 733154 - timestamp: 1725519798030 + size: 806834 + timestamp: 1727772248889 - kind: conda name: pyogrio - version: 0.9.0 - build: py312hcd3578f_2 - build_number: 2 + version: 0.10.0 + build: py312h8f0a83f_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.9.0-py312hcd3578f_2.conda - sha256: 58a8a7fbfd8e1d4637c89ad1d8ae2dc5874d8cf92d7593c5c006cc05efe73d78 - md5: 70b136b0fdb2751e6c2a0bfd00ef2f6b + url: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.10.0-py312h8f0a83f_0.conda + sha256: 6928f0571a899d7a182bcea108071139033c6d303fc25c642ba4bc30296ca4a8 + md5: 485d01a5c9fdee12ab3552f7a6f078d6 depends: - __osx >=10.13 - - gdal - libcxx >=17 - - libgdal >=3.9.2,<3.10.0a0 - libgdal-core >=3.9.2,<3.10.0a0 - numpy - packaging @@ -15595,34 +15718,31 @@ packages: license_family: MIT purls: - pkg:pypi/pyogrio?source=hash-mapping - size: 664111 - timestamp: 1725519932877 + size: 567616 + timestamp: 1727771812198 - kind: conda name: pyogrio - version: 0.9.0 - build: py312hd215820_2 - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.9.0-py312hd215820_2.conda - sha256: 3adf95bc11ab4352155e7273a8ba235230f85a13b0506dd143e0504fb292a40b - md5: 
3656489c0c4514e769f26dea21c3dc21 + version: 0.10.0 + build: py312he8b4914_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.10.0-py312he8b4914_0.conda + sha256: cb0647597ae54d0007e698149f6ca519d80c389daee0f4fef149fe9902a0b31b + md5: 309f7524c82d168cc055e7b136713693 depends: - - gdal - - libgdal >=3.9.2,<3.10.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - libgdal-core >=3.9.2,<3.10.0a0 + - libstdcxx >=13 - numpy - packaging - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/pyogrio?source=hash-mapping - size: 887063 - timestamp: 1725520279515 + size: 639343 + timestamp: 1727771812389 - kind: conda name: pyparsing version: 3.1.4 @@ -15642,122 +15762,124 @@ packages: timestamp: 1724616224956 - kind: conda name: pyproj - version: 3.6.1 - build: py312h6f27134_9 - build_number: 9 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.6.1-py312h6f27134_9.conda - sha256: 42c2eaa5d75ad0e184e8b957ae07c9d9438fa611c8b896a9a387aaafe10dc3b2 - md5: a7414c734b08e74d22581a9a07686301 + version: 3.7.0 + build: py312h9673cc4_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.7.0-py312h9673cc4_0.conda + sha256: 7d3da4af08caf0491779b51ea055ecb74bd99ef37981ad19f9404349dbfa53ed + md5: c44fa471064d7ca1c3f335dfeafa5651 depends: + - __osx >=10.13 - certifi - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/pyproj?source=hash-mapping - size: 740323 - timestamp: 1725436440016 + size: 489504 + timestamp: 1727795643053 - kind: conda name: pyproj - version: 3.6.1 - build: py312h9211aeb_9 - build_number: 9 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda - sha256: 9525e8363d4b5509c7520f8b47a7011c0d1e8f66b294f17679093a3d15af086c - md5: 173afeb0d112c854fd1a9fcac4b5cce3 + version: 3.7.0 + build: py312ha24589b_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.7.0-py312ha24589b_0.conda + sha256: 8530fe6b44cebaf5ce57c13c7144760058b8f0b83b940b178b52fd8aa9fb82db + md5: 1f0cacc6f721d87faa12ef1ce66d112d depends: - - __glibc >=2.17,<3.0.a0 - certifi - - libgcc >=13 - - proj >=9.4.1,<9.5.0a0 + - proj >=9.5.0,<9.6.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/pyproj?source=hash-mapping - size: 547934 - timestamp: 1725436149519 + size: 748941 + timestamp: 1727795870023 - kind: conda name: pyproj - version: 3.6.1 - build: py312haf32e09_9 - build_number: 9 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.6.1-py312haf32e09_9.conda - sha256: eaf202c9484db91c2e7c851dcbdbb900c5c68126a088f962ccc9605604dd8e69 - md5: 87547186b89fe0263b5ce2d638b3f958 + version: 3.7.0 + build: py312he630544_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda + sha256: 713d38f8f4fce141eec5c282e333b145a1359c1c6cc34f506d03b164497e6a74 + md5: 427799f15b36751761941f4cbd7d780f depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - certifi - - proj >=9.4.1,<9.5.0a0 + - libgcc >=13 + - proj >=9.5.0,<9.6.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: MIT license_family: MIT purls: - pkg:pypi/pyproj?source=hash-mapping - size: 483738 - timestamp: 1725436121238 + size: 555468 + timestamp: 1727795528667 - kind: conda name: pyside6 - version: 6.7.2 - build: py312h2ee7485_3 - build_number: 3 + version: 6.7.3 + build: py312h2ee7485_1 + build_number: 1 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.2-py312h2ee7485_3.conda - sha256: bf437de4f749eba0e77e53faf0a49abb6ebb1c83787636cd2993a0d4c2a558a7 - md5: 18ab45e45cd109d6223e5f89f06ecb0a + url: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.3-py312h2ee7485_1.conda + sha256: e3f3c1eb70a051f06c0598ac70fb9d5e770a116a56b399d48f4ce22a2c169e89 + md5: d55a97c0263d3f780726a76ae18498cc depends: - - libclang13 >=18.1.8 + - libclang13 >=19.1.0 - libxml2 >=2.12.7,<3.0a0 - libxslt >=1.1.39,<2.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - qt6-main 6.7.2.* - - qt6-main >=6.7.2,<6.8.0a0 + - qt6-main 6.7.3.* + - qt6-main >=6.7.3,<6.8.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: LGPL-3.0-only + license_family: LGPL purls: - pkg:pypi/pyside6?source=hash-mapping - pkg:pypi/shiboken6?source=hash-mapping - size: 9237358 - timestamp: 1726118783905 + size: 9211882 + timestamp: 1727987852185 - kind: conda name: pyside6 - version: 6.7.2 - build: py312h91f0f75_3 - build_number: 3 + version: 6.7.3 + build: py312h91f0f75_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.2-py312h91f0f75_3.conda - sha256: 797e68f35d400abcb3eedc3ed10df1b2ca3d0c405d98721c821978c2f0666996 - md5: 19dba13e88e2d4800860edc05dda1c6a + url: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.3-py312h91f0f75_1.conda + sha256: e9d26444e4a554a71e885017898b101d388855277b6604f3235e50b63cc66fe0 + md5: 64a74d686fd29fa04c4c313a688e2421 depends: - __glibc >=2.17,<3.0.a0 - - libclang13 >=18.1.8 + - libclang13 >=19.1.0 + - libegl >=1.7.0,<2.0a0 - libgcc >=13 + - libgl >=1.7.0,<2.0a0 + - libopengl >=1.7.0,<2.0a0 - libstdcxx >=13 - libxml2 >=2.12.7,<3.0a0 - libxslt >=1.1.39,<2.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - qt6-main 6.7.2.* - - qt6-main >=6.7.2,<6.8.0a0 + - qt6-main 6.7.3.* + - qt6-main >=6.7.3,<6.8.0a0 license: LGPL-3.0-only + license_family: LGPL purls: - 
pkg:pypi/pyside6?source=hash-mapping - pkg:pypi/shiboken6?source=hash-mapping - size: 10600770 - timestamp: 1726118924165 + size: 10458409 + timestamp: 1727987584620 - kind: conda name: pysocks version: 1.7.1 @@ -15817,6 +15939,7 @@ packages: constrains: - pytest-faulthandler >=2 license: MIT + license_family: MIT purls: - pkg:pypi/pytest?source=hash-mapping size: 258293 @@ -15864,26 +15987,21 @@ packages: timestamp: 1718138508765 - kind: conda name: python - version: 3.12.5 - build: h2ad013b_0_cpython - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda - sha256: e2aad83838988725d4ffba4e9717b9328054fd18a668cff3377e0c50f109e8bd - md5: 9c56c4df45f6571b13111d8df2448692 + version: 3.12.7 + build: h8f8b54e_0_cpython + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.7-h8f8b54e_0_cpython.conda + sha256: 28172d94f7193c5075c0fc3c4b1bb617c512ffc991f4e2af0dbb6a2916872b76 + md5: 7f81191b1ca1113e694e90e15c27a12f depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - bzip2 >=1.0.8,<2.0a0 - - ld_impl_linux-64 >=2.36.1 - - libexpat >=2.6.2,<3.0a0 + - libexpat >=2.6.3,<3.0a0 - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 - - libnsl >=2.0.1,<2.1.0a0 - - libsqlite >=3.46.0,<4.0a0 - - libuuid >=2.38.1,<3.0a0 - - libxcrypt >=4.4.36 + - libsqlite >=3.46.1,<4.0a0 - libzlib >=1.3.1,<2.0a0 - ncurses >=6.5,<7.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - readline >=8.2,<9.0a0 - tk >=8.6.13,<8.7.0a0 - tzdata @@ -15892,25 +16010,30 @@ packages: - python_abi 3.12.* *_cp312 license: Python-2.0 purls: [] - size: 31663253 - timestamp: 1723143721353 + size: 13761315 + timestamp: 1728058247482 - kind: conda name: python - version: 3.12.5 - build: h37a9e06_0_cpython - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda - sha256: c0f39e625b2fd65f70a9cc086fe4b25cc72228453dbbcd92cd5d140d080e38c5 - md5: 517cb4e16466f8d96ba2a72897d14c48 + version: 
3.12.7 + build: hc5c86c4_0_cpython + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda + sha256: 674be31ff152d9f0e0fe16959a45e3803a730fc4f54d87df6a9ac4e6a698c41d + md5: 0515111a9cdf69f83278f7c197db9807 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - bzip2 >=1.0.8,<2.0a0 - - libexpat >=2.6.2,<3.0a0 + - ld_impl_linux-64 >=2.36.1 + - libexpat >=2.6.3,<3.0a0 - libffi >=3.4,<4.0a0 - - libsqlite >=3.46.0,<4.0a0 + - libgcc >=13 + - libnsl >=2.0.1,<2.1.0a0 + - libsqlite >=3.46.1,<4.0a0 + - libuuid >=2.38.1,<3.0a0 + - libxcrypt >=4.4.36 - libzlib >=1.3.1,<2.0a0 - ncurses >=6.5,<7.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - readline >=8.2,<9.0a0 - tk >=8.6.13,<8.7.0a0 - tzdata @@ -15919,23 +16042,23 @@ packages: - python_abi 3.12.* *_cp312 license: Python-2.0 purls: [] - size: 12173272 - timestamp: 1723142761765 + size: 31574780 + timestamp: 1728059777603 - kind: conda name: python - version: 3.12.5 - build: h889d299_0_cpython + version: 3.12.7 + build: hce54a09_0_cpython subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda - sha256: 4cef304eb8877fd3094c14b57097ccc1b817b4afbf2223dd45d2b61e44064740 - md5: db056d8b140ab2edd56a2f9bdb203dcd + url: https://conda.anaconda.org/conda-forge/win-64/python-3.12.7-hce54a09_0_cpython.conda + sha256: 2308cfa9ec563360d29ced7fd13a6b60b9a7b3cf8961a95c78c69f486211d018 + md5: 21f1f7c6ccf6b747c5086d2422c230e1 depends: - bzip2 >=1.0.8,<2.0a0 - - libexpat >=2.6.2,<3.0a0 + - libexpat >=2.6.3,<3.0a0 - libffi >=3.4,<4.0a0 - - libsqlite >=3.46.0,<4.0a0 + - libsqlite >=3.46.1,<4.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - tk >=8.6.13,<8.7.0a0 - tzdata - ucrt >=10.0.20348.0 @@ -15946,8 +16069,8 @@ packages: - python_abi 3.12.* *_cp312 license: Python-2.0 purls: [] - size: 15897752 - timestamp: 1723141830317 + size: 15987537 + timestamp: 1728057382072 - kind: conda name: 
python-dateutil version: 2.9.0 @@ -16002,21 +16125,21 @@ packages: timestamp: 1677079727691 - kind: conda name: python-tzdata - version: '2024.1' + version: '2024.2' build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda - sha256: 9da9a849d53705dee450b83507df1ca8ffea5f83bd21a215202221f1c492f8ad - md5: 98206ea9954216ee7540f0c773f2104d + url: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda + sha256: fe3f62ce2bc714bdaa222ab3f0344a2815ad9e853c6df38d15c9f25de8a3a6d4 + md5: 986287f89929b2d629bd6ef6497dc307 depends: - python >=3.6 license: Apache-2.0 license_family: APACHE purls: - pkg:pypi/tzdata?source=hash-mapping - size: 144024 - timestamp: 1707747742930 + size: 142527 + timestamp: 1727140688093 - kind: conda name: python_abi version: '3.12' @@ -16067,20 +16190,21 @@ packages: timestamp: 1723823139725 - kind: conda name: pytz - version: '2024.2' + version: '2024.1' build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda - sha256: 81c16d9183bb4a6780366ce874e567ee5fc903722f85b2f8d1d9479ef1dafcc9 - md5: 260009d03c9d5c0f111904d851f053dc + url: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda + sha256: 1a7d6b233f7e6e3bbcbad054c8fd51e690a67b129a899a056a5e45dd9f00cb41 + md5: 3eeeeb9e4827ace8c0c1419c85d590ad depends: - python >=3.7 license: MIT + license_family: MIT purls: - pkg:pypi/pytz?source=hash-mapping - size: 186995 - timestamp: 1726055625738 + size: 188538 + timestamp: 1706886944988 - kind: conda name: pywin32 version: '306' @@ -16307,13 +16431,13 @@ packages: timestamp: 1720814433486 - kind: conda name: qt6-main - version: 6.7.2 - build: hb12f9c5_5 - build_number: 5 + version: 6.7.3 + build: h6e8976b_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.2-hb12f9c5_5.conda - sha256: 
712c5e6fef0b121bd62d941f8e11fff2ac5e1b36b7af570f4465f51e14193104 - md5: 8c662388c2418f293266f5e7f50df7d7 + url: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda + sha256: f5e4cefa82edec73c9bfc99566391463aeb339cfae8446f9b3c7950fefec6555 + md5: f3234422a977b5d400ccf503ad55c5d1 depends: - __glibc >=2.17,<3.0.a0 - alsa-lib >=1.2.12,<1.3.0a0 @@ -16325,85 +16449,89 @@ packages: - harfbuzz >=9.0.0,<10.0a0 - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - libclang-cpp18.1 >=18.1.8,<18.2.0a0 - - libclang13 >=18.1.8 + - libclang-cpp19.1 >=19.1.0,<19.2.0a0 + - libclang13 >=19.1.0 - libcups >=2.3.3,<2.4.0a0 - - libdrm >=2.4.122,<2.5.0a0 + - libdrm >=2.4.123,<2.5.0a0 - libegl >=1.7.0,<2.0a0 - - libgcc-ng >=12 + - libgcc >=13 - libgl >=1.7.0,<2.0a0 - - libglib >=2.80.3,<3.0a0 + - libglib >=2.82.1,<3.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - - libllvm18 >=18.1.8,<18.2.0a0 - - libpng >=1.6.43,<1.7.0a0 - - libpq >=16.4,<17.0a0 - - libsqlite >=3.46.0,<4.0a0 - - libstdcxx-ng >=12 - - libtiff >=4.6.0,<4.7.0a0 + - libllvm19 >=19.1.0,<19.2.0a0 + - libpng >=1.6.44,<1.7.0a0 + - libpq >=17.0,<18.0a0 + - libsqlite >=3.46.1,<4.0a0 + - libstdcxx >=13 + - libtiff >=4.7.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.17.0,<2.0a0 - libxkbcommon >=1.7.0,<2.0a0 - libxml2 >=2.12.7,<3.0a0 - libzlib >=1.3.1,<2.0a0 - mysql-libs >=9.0.1,<9.1.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - - wayland >=1.23.0,<2.0a0 + - wayland >=1.23.1,<2.0a0 - xcb-util >=0.4.1,<0.5.0a0 - - xcb-util-cursor >=0.1.4,<0.2.0a0 + - xcb-util-cursor >=0.1.5,<0.2.0a0 - xcb-util-image >=0.4.0,<0.5.0a0 - xcb-util-keysyms >=0.4.1,<0.5.0a0 - xcb-util-renderutil >=0.3.10,<0.4.0a0 - xcb-util-wm >=0.4.2,<0.5.0a0 - xorg-libice >=1.1.1,<2.0a0 - xorg-libsm >=1.2.4,<2.0a0 - - xorg-libx11 >=1.8.9,<2.0a0 - - xorg-libxext >=1.3.4,<2.0a0 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxcomposite >=0.4.6,<1.0a0 + - xorg-libxcursor >=1.2.2,<2.0a0 + 
- xorg-libxdamage >=1.1.6,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxrandr >=1.5.4,<2.0a0 - xorg-libxtst >=1.2.5,<2.0a0 - xorg-libxxf86vm >=1.1.5,<2.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - - qt 6.7.2 + - qt 6.7.3 license: LGPL-3.0-only license_family: LGPL purls: [] - size: 46904534 - timestamp: 1724536870579 + size: 47378301 + timestamp: 1727940486113 - kind: conda name: qt6-main - version: 6.7.2 - build: hbb46ec1_5 - build_number: 5 + version: 6.7.3 + build: hfb098fa_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.2-hbb46ec1_5.conda - sha256: 23d5e8864e9957c00546be554171e3c4415a7e0670870bd361db8e28e0be716e - md5: e14fa5fe2da0bf8cc30d06314ce6ce33 + url: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.3-hfb098fa_1.conda + sha256: c10933396b409f74f05fe7036ddf2b129e219dd3939170c3ebb0fd0790cd14ac + md5: 3dd4b78a610e48def640c3c9acd0c7e7 depends: - double-conversion >=3.3.0,<3.4.0a0 - harfbuzz >=9.0.0,<10.0a0 - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - libclang13 >=18.1.8 - - libglib >=2.80.3,<3.0a0 + - libclang13 >=19.1.0 + - libglib >=2.82.1,<3.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - - libpng >=1.6.43,<1.7.0a0 - - libsqlite >=3.46.0,<4.0a0 - - libtiff >=4.6.0,<4.7.0a0 + - libpng >=1.6.44,<1.7.0a0 + - libsqlite >=3.46.1,<4.0a0 + - libtiff >=4.7.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - - openssl >=3.3.1,<4.0a0 + - openssl >=3.3.2,<4.0a0 - pcre2 >=10.44,<10.45.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.6.0a0 constrains: - - qt 6.7.2 + - qt 6.7.3 license: LGPL-3.0-only license_family: LGPL purls: [] - size: 85902078 - timestamp: 1724537977958 + size: 88587578 + timestamp: 1727941590323 - kind: conda name: quarto version: 1.5.57 @@ -16420,6 +16548,7 @@ packages: - pandoc 3.2 - typst 0.11.0 license: GPL-2.0-or-later + license_family: GPL purls: [] size: 15860524 timestamp: 1725910705704 @@ -16439,6 +16568,7 @@ packages: - 
pandoc 3.2 - typst 0.11.0 license: GPL-2.0-or-later + license_family: GPL purls: [] size: 15729834 timestamp: 1725910469278 @@ -16458,6 +16588,7 @@ packages: - pandoc 3.2 - typst 0.11.0 license: GPL-2.0-or-later + license_family: GPL purls: [] size: 16059182 timestamp: 1725910489963 @@ -16492,43 +16623,43 @@ packages: timestamp: 1724510613002 - kind: conda name: rasterio - version: 1.3.11 - build: py312h4104ae2_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.3.11-py312h4104ae2_0.conda - sha256: 4cdc05af7bb4841b94cadb2433f32242180d80401a324917c7915f0281f2f446 - md5: 0e9793e312dae8b4e34b4d54db6ae5c7 + version: 1.4.1 + build: py312h1701b51_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.4.1-py312h1701b51_0.conda + sha256: 3cae210f13e69f798dedca352a158aef89adbad8f8da73efc870ea5724d983f8 + md5: 92b70792bb71c09cc619a1c50e90c874 depends: - - __osx >=10.13 - affine - attrs - certifi - click >=4 - click-plugins - cligj >=0.5 - - libcxx >=17 - - libgdal >=3.9.2,<3.10.0a0 - libgdal-core >=3.9.2,<3.10.0a0 - - numpy >=1.19,<3 - - proj >=9.4.1,<9.5.0a0 + - numpy >=1.21,<3 + - proj >=9.5.0,<9.6.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - setuptools >=0.9.8 - snuggs >=1.4.1 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/rasterio?source=hash-mapping - size: 6954057 - timestamp: 1725459093401 + size: 7914629 + timestamp: 1727780586284 - kind: conda name: rasterio - version: 1.3.11 - build: py312hd177ed6_0 + version: 1.4.1 + build: py312h8456570_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_0.conda - sha256: 210bddb89d3063fe6b63f9d0d9a0b48c9776e62120e0db82527d1255f6944f12 - md5: 996cf1c27ebf9466c00fc28b0080e9a1 + url: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.4.1-py312h8456570_0.conda + sha256: 
4a1ce9dfc118ace8aa6f6b7272a2f6da58c210b549c6ec3579b7fc4f05aee8d8 + md5: 263b3f22e30f3fd60b94889ab23529c3 depends: - __glibc >=2.17,<3.0.a0 - affine @@ -16538,11 +16669,10 @@ packages: - click-plugins - cligj >=0.5 - libgcc >=13 - - libgdal >=3.9.2,<3.10.0a0 - libgdal-core >=3.9.2,<3.10.0a0 - libstdcxx >=13 - - numpy >=1.19,<3 - - proj >=9.4.1,<9.5.0a0 + - numpy >=1.21,<3 + - proj >=9.5.0,<9.6.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - setuptools >=0.9.8 @@ -16551,49 +16681,47 @@ packages: license_family: BSD purls: - pkg:pypi/rasterio?source=hash-mapping - size: 7247649 - timestamp: 1725458951562 + size: 7399716 + timestamp: 1727780132987 - kind: conda name: rasterio - version: 1.3.11 - build: py312he4a2ebf_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.11-py312he4a2ebf_0.conda - sha256: 394a53ccf0ed3769992b8e0fe2c83b53b1da8a9a5efb4e60abd6b67f135443a7 - md5: b172bdda0a0f039a35d621e56dd2cd36 + version: 1.4.1 + build: py312h89b8ddc_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.4.1-py312h89b8ddc_0.conda + sha256: e64fd5007ccb66e047e800ff81d40784f5dc1abf82bad73ac9abef2ce50ea256 + md5: f7620e387bae3e64e066971fd5dd37a1 depends: + - __osx >=10.13 - affine - attrs - certifi - click >=4 - click-plugins - cligj >=0.5 - - libgdal >=3.9.2,<3.10.0a0 + - libcxx >=17 - libgdal-core >=3.9.2,<3.10.0a0 - - numpy >=1.19,<3 - - proj >=9.4.1,<9.5.0a0 + - numpy >=1.21,<3 + - proj >=9.5.0,<9.6.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - setuptools >=0.9.8 - snuggs >=1.4.1 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/rasterio?source=hash-mapping - size: 7139285 - timestamp: 1725459641246 + size: 7410502 + timestamp: 1727780095937 - kind: conda name: rasterstats - version: 0.19.0 + version: 0.20.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: 
https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.19.0-pyhd8ed1ab_0.conda - sha256: df72bc20e76caa2dc5a77bf1bd6899cc65fdef092fed0329eada5a93ed3729a3 - md5: 83aa6e0ab5c370e2d464fd72ef4cf155 + url: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda + sha256: c7b619d11fc1f0a20fa1004b992156d68c1f15147f321e24320e0139f573ef13 + md5: 53e0165747d26e6f2ae20399cf55bd5e depends: - affine - click >7.1 @@ -16608,56 +16736,53 @@ packages: license_family: BSD purls: - pkg:pypi/rasterstats?source=hash-mapping - size: 20607 - timestamp: 1685447856675 + size: 20723 + timestamp: 1727684612025 - kind: conda name: re2 - version: 2023.09.01 - build: h7f4b329_2 - build_number: 2 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda - sha256: f0f520f57e6b58313e8c41abc7dfa48742a05f1681f05654558127b667c769a8 - md5: 8f70e36268dea8eb666ef14c29bd3cda + version: 2023.11.01 + build: h2fb0a26_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.11.01-h2fb0a26_0.conda + sha256: e38f014d9905a59a4deefb30f8c848ac1f829664795830d89bd0aea9cf0ee8f4 + md5: 6ec7183fedc2a75b1897cc29ffbd4288 depends: - - libre2-11 2023.09.01 h5a48ba9_2 + - libre2-11 2023.11.01 hd530cb8_0 license: BSD-3-Clause license_family: BSD purls: [] - size: 26617 - timestamp: 1708946796423 + size: 26834 + timestamp: 1728466533363 - kind: conda name: re2 - version: 2023.09.01 - build: hb168e87_2 - build_number: 2 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.09.01-hb168e87_2.conda - sha256: 5739ed2cfa62ed7f828eb4b9e6e69ff1df56cb9a9aacdc296451a3cb647034eb - md5: 266f8ca8528fc7e0fa31066c309ad864 + version: 2023.11.01 + build: h77b4e00_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.11.01-h77b4e00_0.conda + sha256: 48a802f9498d467cf2f26dca8cf9be710934a6d20978bb8d90dabdf261cd4206 + md5: 1130d7820f70a485cbdda10e166d31de depends: - - libre2-11 
2023.09.01 h81f5012_2 + - libre2-11 2023.11.01 hbbce691_0 license: BSD-3-Clause license_family: BSD purls: [] - size: 26814 - timestamp: 1708947195067 + size: 26664 + timestamp: 1728466449788 - kind: conda name: re2 - version: 2023.09.01 - build: hd3b24a8_2 - build_number: 2 + version: 2023.11.01 + build: hd3b24a8_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/re2-2023.09.01-hd3b24a8_2.conda - sha256: 929744a982215ea19f6f9a9d00c782969cd690bfddeeb650a39df1536af577fe - md5: ffeb985810bc7d103662e1465c758847 + url: https://conda.anaconda.org/conda-forge/win-64/re2-2023.11.01-hd3b24a8_0.conda + sha256: af6521397c0a502cf8817d7a4d0f25b10090064ca4452b03919d101a5decdf3b + md5: 2724b2bc2ce3c8581212063682c7210e depends: - - libre2-11 2023.09.01 hf8d8778_2 + - libre2-11 2023.11.01 h4eb7d71_0 license: BSD-3-Clause license_family: BSD purls: [] - size: 207315 - timestamp: 1708947529390 + size: 213974 + timestamp: 1728466677561 - kind: conda name: readline version: '8.2' @@ -16804,13 +16929,13 @@ packages: editable: true - kind: conda name: ribasim - version: 2024.10.0 + version: 2024.11.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.10.0-pyhd8ed1ab_0.conda - sha256: 8763fe14e25ce12407e2b56e3ec3019dc93416a3fae838c6c5c5c39e4a2ea61e - md5: 6764d892db65d2b0f2ac8a072d61d2f0 + url: https://conda.anaconda.org/conda-forge/noarch/ribasim-2024.11.0-pyhd8ed1ab_0.conda + sha256: aefab229fbeeb618a675d7577dce1d388d22fb721844765a014ea38b53d43d9b + md5: f3c29e643dd6869b05b861ecb9346981 depends: - geopandas - matplotlib-base @@ -16828,8 +16953,8 @@ packages: license_family: MIT purls: - pkg:pypi/ribasim?source=hash-mapping - size: 38500 - timestamp: 1721804986901 + size: 225176 + timestamp: 1728473039990 - kind: pypi name: ribasim-nl version: 0.1.0 @@ -16842,24 +16967,24 @@ packages: editable: true - kind: conda name: rich - version: 13.8.1 + version: 13.9.2 build: pyhd8ed1ab_0 subdir: noarch noarch: 
python - url: https://conda.anaconda.org/conda-forge/noarch/rich-13.8.1-pyhd8ed1ab_0.conda - sha256: eb7d88222ec1a05c1b333aab5ca9bf486f2005f17c0d86a6b7653da47c6c143b - md5: 748f1807fa7fda73651795c5617b9557 + url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda + sha256: 7d481312e97df9ab914151c8294caff4a48f6427e109715445897166435de2ff + md5: e56b63ff450389ba95a86e97816de7a4 depends: - markdown-it-py >=2.2.0 - pygments >=2.13.0,<3.0.0 - - python >=3.7 + - python >=3.8 - typing_extensions >=4.0.0,<5.0.0 license: MIT license_family: MIT purls: - pkg:pypi/rich?source=hash-mapping - size: 185358 - timestamp: 1726066139954 + size: 185770 + timestamp: 1728057948663 - kind: conda name: rioxarray version: 0.17.0 @@ -16949,12 +17074,12 @@ packages: timestamp: 1725327367863 - kind: conda name: ruff - version: 0.6.4 + version: 0.6.9 build: py312h881003e_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.4-py312h881003e_0.conda - sha256: 82c8fc3fecece3fa6db6d88be239ba62b407c09071df7705664cbfaf7550b388 - md5: 302b3f9a3d88d6d535da9d8fe663eb7d + url: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.9-py312h881003e_0.conda + sha256: 7245ce8c79e67e6b875a3ac1eaeb0c79120df5c574a05515568136722de16e5c + md5: 23f21bcb92a63244513e6342a080f25e depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -16965,16 +17090,16 @@ packages: license_family: MIT purls: - pkg:pypi/ruff?source=hash-mapping - size: 6455872 - timestamp: 1725619373056 + size: 6866519 + timestamp: 1728068193073 - kind: conda name: ruff - version: 0.6.4 + version: 0.6.9 build: py312hd18ad41_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.4-py312hd18ad41_0.conda - sha256: 64e89828218eb52ba71fee66d74fbc19817ca0f914cb6e9ad3c82423e9f6d40e - md5: bbb52fcabbc926d506bed70d70e44776 + url: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.9-py312hd18ad41_0.conda + sha256: 
8059c18ce229c11b15f68b2b83ea4b88b5ced8419f50771ed1280a1941acbf01 + md5: 7ac1f3b194406d4b6cca98e3d15ba7f3 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -16985,16 +17110,16 @@ packages: license_family: MIT purls: - pkg:pypi/ruff?source=hash-mapping - size: 6554879 - timestamp: 1725618160547 + size: 6955837 + timestamp: 1728067151446 - kind: conda name: ruff - version: 0.6.4 + version: 0.6.9 build: py312he6c0bb9_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.4-py312he6c0bb9_0.conda - sha256: 386e02becf61164e38b896ae9e3782d69aa34e6ef63013afd88284811e1674cd - md5: ff1f5ec398a38d04b42d0d62a962f0b9 + url: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.9-py312he6c0bb9_0.conda + sha256: 9b39feb1bba952d907910c191c58c9549a4194e99d2ac2592a3e7e1c29da829d + md5: acdf4da8a407ca2437a2e0f89d121327 depends: - __osx >=10.13 - libcxx >=17 @@ -17006,16 +17131,16 @@ packages: license_family: MIT purls: - pkg:pypi/ruff?source=hash-mapping - size: 6298299 - timestamp: 1725618483850 + size: 6699477 + timestamp: 1728067495992 - kind: conda name: s2n - version: 1.5.2 - build: h7b32b05_0 + version: 1.5.4 + build: h1380c3d_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.2-h7b32b05_0.conda - sha256: a08afbf88cf0d298da69118c12432ab76d4c2bc2972b2f9b87de95b2530cfae8 - md5: daf6322364fe6fc46c515d4d3d0051c2 + url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.4-h1380c3d_0.conda + sha256: b5145c74e781511ea55dad60dbb45e1053be1543d2577b29f4b091c96f93a65a + md5: 4e63e4713ffc9cddc3d5d435b5853b93 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -17023,8 +17148,8 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 351882 - timestamp: 1725682764682 + size: 352989 + timestamp: 1728019508986 - kind: conda name: scikit-learn version: 1.5.2 @@ -17046,6 +17171,7 @@ packages: - scipy - threadpoolctl >=3.1.0 license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/scikit-learn?source=hash-mapping 
size: 10393222 @@ -17070,6 +17196,7 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/scikit-learn?source=hash-mapping size: 9215977 @@ -17094,6 +17221,7 @@ packages: - scipy - threadpoolctl >=3.1.0 license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/scikit-learn?source=hash-mapping size: 9479906 @@ -17237,32 +17365,32 @@ packages: timestamp: 1712585816346 - kind: conda name: setuptools - version: 73.0.1 + version: 75.1.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda - sha256: c9f5e110e3fe5a7c4cd5b9da445c05a1fae000b43ab3a97cb6a501f4267515fc - md5: f0b618d7673d1b2464f600b34d912f6f + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + sha256: 6725235722095c547edd24275053c615158d6163f396550840aebd6e209e4738 + md5: d5cd48392c67fb6849ba459c2c2b671f depends: - python >=3.8 license: MIT license_family: MIT purls: - - pkg:pypi/setuptools?source=compressed-mapping - size: 1460460 - timestamp: 1725348602179 + - pkg:pypi/setuptools?source=hash-mapping + size: 777462 + timestamp: 1727249510532 - kind: conda name: shapely version: 2.0.6 - build: py312h3a88d77_1 - build_number: 1 + build: py312h0c580ee_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h3a88d77_1.conda - sha256: 1af8c26dc5507f60e4459975d228e7e329119ec9311ebb88c98603838a9871d2 - md5: 4f5c4f3160397a63c7b15c81f6c0e9b3 + url: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h0c580ee_2.conda + sha256: ab01255f62a50bffd1060b4eccb744812cfdb17d2e881af3d00fc94b0bb1bbe5 + md5: 47e5eab5c53da52540d057b8d73ac49a depends: - - geos >=3.12.2,<3.12.3.0a0 + - geos >=3.13.0,<3.13.1.0a0 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -17273,20 +17401,20 @@ packages: license_family: BSD purls: - 
pkg:pypi/shapely?source=hash-mapping - size: 534109 - timestamp: 1725394700590 + size: 534759 + timestamp: 1727274378841 - kind: conda name: shapely version: 2.0.6 - build: py312h6cab151_1 - build_number: 1 + build: py312h391bc85_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda - sha256: b7818c7264926401b78b0afc9a7d2c98ff0fc0ed637ad9e5c126da38a40382f7 - md5: 5be02e05e1adaa42826cc6800ce399bc + url: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda + sha256: f8668874427468e53e08f33903c8040415807fd9efb09c92b4592778654d6027 + md5: eb476b4975ea28ac12ff469063a71f5d depends: - __glibc >=2.17,<3.0.a0 - - geos >=3.12.2,<3.12.3.0a0 + - geos >=3.13.0,<3.13.1.0a0 - libgcc >=13 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 @@ -17295,20 +17423,20 @@ packages: license_family: BSD purls: - pkg:pypi/shapely?source=hash-mapping - size: 571255 - timestamp: 1725394110104 + size: 571386 + timestamp: 1727273539771 - kind: conda name: shapely version: 2.0.6 - build: py312h8047845_1 - build_number: 1 + build: py312h4ff98d2_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h8047845_1.conda - sha256: 8dbacc971bc78375537f4fc8879cb8d957f08d98622bf6262eaf3f8ea9cb31b3 - md5: 2f4d580436a2f2162275c9994dd706cb + url: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h4ff98d2_2.conda + sha256: c9d4ab587dc7b254ed8eea95bf9f09d5f461b84b115789599cf5d83a74362ef4 + md5: df8305eeb00bcefb7b2b8c3d4b8dac3d depends: - __osx >=10.13 - - geos >=3.12.2,<3.12.3.0a0 + - geos >=3.13.0,<3.13.1.0a0 - numpy >=1.19,<3 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -17316,8 +17444,8 @@ packages: license_family: BSD purls: - pkg:pypi/shapely?source=hash-mapping - size: 538593 - timestamp: 1725394128298 + size: 535513 + timestamp: 1727273654827 - kind: conda name: simplejson version: 3.19.3 @@ -17686,22 +17814,21 @@ packages: 
timestamp: 1665138565317 - kind: conda name: tbb - version: 2021.13.0 - build: hc790b64_0 + version: 2021.7.0 + build: h91493d7_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-hc790b64_0.conda - sha256: 990dbe4fb42f14700c22bd434d8312607bf8d0bd9f922b054e51fda14c41994c - md5: 28496a1e6af43c63927da4f80260348d + url: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.7.0-h91493d7_0.tar.bz2 + sha256: c3d607499a6e097f4b8b27048ee7166319fd3dfe98aea9e69a69a3d087b986e3 + md5: f57be598137919e4f7e7d159960d66a1 depends: - - libhwloc >=2.11.1,<2.11.2.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - vs2015_runtime >=14.29.30139 license: Apache-2.0 license_family: APACHE purls: [] - size: 151494 - timestamp: 1725532984828 + size: 178574 + timestamp: 1668617991077 - kind: conda name: tblib version: 3.0.0 @@ -17798,28 +17925,28 @@ packages: timestamp: 1714400228771 - kind: conda name: tiledb - version: 2.26.0 - build: h313d0e2_0 + version: 2.26.2 + build: h1b23fdf_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.0-h313d0e2_0.conda - sha256: 56f5f42acb0c46f8a0871c8f092850fc58fb8a542adaeb9f3f412ffccacfecde - md5: 7034c5fe1336d6f1c86299ce8e545de0 + url: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-h1b23fdf_0.conda + sha256: d394b829556f98f98528be3356ac4c5e8c5859f77b09ae6ce32efa68bbe884f2 + md5: af186cb9c0de2d3d00dc6fdffe4a7137 depends: - __osx >=10.13 - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.8.0,<1.8.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 + - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - bzip2 >=1.0.8,<2.0a0 - fmt >=11.0.2,<12.0a0 - 
libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libcurl >=8.9.1,<9.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libcurl >=8.10.1,<9.0a0 - libcxx >=17 - - libgoogle-cloud >=2.28.0,<2.29.0a0 - - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 + - libgoogle-cloud >=2.29.0,<2.30.0a0 + - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 @@ -17829,81 +17956,81 @@ packages: license: MIT license_family: MIT purls: [] - size: 3960161 - timestamp: 1726059270643 + size: 3974130 + timestamp: 1727662107924 - kind: conda name: tiledb - version: 2.26.0 - build: h86fa3b2_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda - sha256: 3e92cec15daed5e03d7fc676a021500fc92ac80716495504537d6e4bdb80138f - md5: 061175d9d4c046a1cf8bffe95a359fab + version: 2.26.2 + build: h34a6a78_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h34a6a78_0.conda + sha256: 1bce3c5f3306f7d31ece1992e90fbba1bd3033b2d7e6bc18a459bace2d4d487f + md5: 26a64a1718f8bdc76a18389be0294aa5 depends: - - __glibc >=2.17,<3.0.a0 - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.8.0,<1.8.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 + - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - bzip2 >=1.0.8,<2.0a0 - fmt >=11.0.2,<12.0a0 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libcurl >=8.9.1,<9.0a0 - - libgcc >=13 - - libgoogle-cloud >=2.28.0,<2.29.0a0 - - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 - - libstdcxx >=13 + - libabseil >=20240722.0,<20240723.0a0 + - libcrc32c >=1.1.2,<1.2.0a0 + - libcurl >=8.10.1,<9.0a0 
+ - libgoogle-cloud >=2.29.0,<2.30.0a0 + - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.3.2,<4.0a0 - spdlog >=1.14.1,<1.15.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 license: MIT license_family: MIT purls: [] - size: 4537477 - timestamp: 1726059097900 + size: 3114129 + timestamp: 1727662671400 - kind: conda name: tiledb - version: 2.26.0 - build: h98a567f_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.0-h98a567f_0.conda - sha256: 823d6d5c172cd90b105553d5dd93e07e0860c8e5751deb3cd076b684366797d7 - md5: 451f161732757b5124fc3a320401c587 + version: 2.26.2 + build: hedb9d39_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hedb9d39_0.conda + sha256: 061966f73c60baecda32cb243dbb292033d4f921e776ef2e0a1d1f13efd29d5f + md5: 40304fbbc36f8dc937adf74cdf024925 depends: - - aws-crt-cpp >=0.28.2,<0.28.3.0a0 - - aws-sdk-cpp >=1.11.379,<1.11.380.0a0 + - __glibc >=2.17,<3.0.a0 + - aws-crt-cpp >=0.28.3,<0.28.4.0a0 + - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.8.0,<1.8.1.0a0 - - azure-storage-blobs-cpp >=12.12.0,<12.12.1.0a0 - - azure-storage-common-cpp >=12.7.0,<12.7.1.0a0 + - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - bzip2 >=1.0.8,<2.0a0 - fmt >=11.0.2,<12.0a0 - libabseil * cxx17* - - libabseil >=20240116.2,<20240117.0a0 - - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl >=8.9.1,<9.0a0 - - libgoogle-cloud >=2.28.0,<2.29.0a0 - - libgoogle-cloud-storage >=2.28.0,<2.29.0a0 + - libabseil >=20240722.0,<20240723.0a0 + - libcurl >=8.10.1,<9.0a0 + - libgcc >=13 + - libgoogle-cloud >=2.29.0,<2.30.0a0 + - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 + - libstdcxx >=13 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - 
lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.3.2,<4.0a0 - spdlog >=1.14.1,<1.15.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 license: MIT license_family: MIT purls: [] - size: 3093646 - timestamp: 1726059615242 + size: 4572331 + timestamp: 1727662399128 - kind: conda name: tinycss2 version: 1.3.0 @@ -17992,55 +18119,55 @@ packages: timestamp: 1604308660817 - kind: conda name: tomli - version: 2.0.1 + version: 2.0.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - sha256: 4cd48aba7cd026d17e86886af48d0d2ebc67ed36f87f6534f4b67138f5a5a58f - md5: 5844808ffab9ebdb694585b50ba02a96 + url: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + sha256: 5e742ba856168b606ac3c814d247657b1c33b8042371f1a08000bdc5075bc0cc + md5: e977934e00b355ff55ed154904044727 depends: - python >=3.7 license: MIT license_family: MIT purls: - pkg:pypi/tomli?source=hash-mapping - size: 15940 - timestamp: 1644342331069 + size: 18203 + timestamp: 1727974767524 - kind: conda name: tomli-w - version: 1.0.0 + version: 1.1.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.0.0-pyhd8ed1ab_0.tar.bz2 - sha256: efb5f78a224c4bb14aab04690c9912256ea12c3a8b8413e60167573ce1282b02 - md5: 73506d1ab4202481841c68c169b7ef6c + url: https://conda.anaconda.org/conda-forge/noarch/tomli-w-1.1.0-pyhd8ed1ab_0.conda + sha256: 25b88bb2c4e79be642d8e5b5738781404055cd596403a20511e6fa30f0c71585 + md5: 2c5eb5b3a0fd2c4787d8162f57da2a20 depends: - - python >=3.7 + - python >=3.9 license: MIT license_family: MIT purls: - pkg:pypi/tomli-w?source=hash-mapping - size: 10052 - timestamp: 1638551820635 + size: 12323 + timestamp: 1728405537678 - kind: conda name: toolz - version: 0.12.1 + version: 1.0.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: 
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - sha256: 22b0a9790317526e08609d5dfdd828210ae89e6d444a9e954855fc29012e90c6 - md5: 2fcb582444635e2c402e8569bb94e039 + url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda + sha256: 6371cf3cf8292f2abdcc2bf783d6e70203d72f8ff0c1625f55a486711e276c75 + md5: 34feccdd4177f2d3d53c73fc44fd9a37 depends: - - python >=3.7 + - python >=3.8 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/toolz?source=hash-mapping - size: 52358 - timestamp: 1706112720607 + size: 52623 + timestamp: 1728059623353 - kind: conda name: tornado version: 6.4.1 @@ -18157,53 +18284,53 @@ packages: timestamp: 1721540525136 - kind: conda name: types-python-dateutil - version: 2.9.0.20240906 - build: pyhd8ed1ab_0 + version: 2.9.0.20241003 + build: pyhff2d567_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240906-pyhd8ed1ab_0.conda - sha256: 737fecb4b6f85a6a85f3fff6cdf5e90c5922b468e036b98f6c1559780cb79664 - md5: 07c483202a209cd23594b62b3451045e + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + sha256: 8489af986daebfbcd13d3748ba55431259206e37f184ab42a57e107fecd85e02 + md5: 3d326f8a2aa2d14d51d8c513426b5def depends: - python >=3.6 license: Apache-2.0 AND MIT purls: - pkg:pypi/types-python-dateutil?source=hash-mapping - size: 21789 - timestamp: 1725623878468 + size: 21765 + timestamp: 1727940339297 - kind: conda name: types-pytz - version: 2024.1.0.20240417 + version: 2024.2.0.20241003 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda - sha256: cc3913a5504b867c748981ba302e82dbc2bda71837f4894d29db8f6cb490e25d - md5: 7b71ace1b99195041329427c435b8125 + url: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda + sha256: 
6e045899904f488888dbed49f44e3ede438e88db9523ec61289fb4fdef4a53b8 + md5: 42775c62ac0671b0d700c754256d5c19 depends: - python >=3.6 license: Apache-2.0 AND MIT purls: - pkg:pypi/types-pytz?source=hash-mapping - size: 18725 - timestamp: 1713337633292 + size: 18634 + timestamp: 1727940306315 - kind: conda name: types-requests - version: 2.32.0.20240907 + version: 2.32.0.20240914 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda - sha256: fd7952c3b23cab240aeb8ba49d994d69de2096daa48633e3894c36e9894dc66a - md5: 9f907bdcfc41daad16bee14d959b18aa + url: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda + sha256: 586def52571620788bc095766f60c9a56b448a492a360e4ab1471ca73bdd21fa + md5: f3cf0cb8f627fa3dff2093a8546074fa depends: - python >=3.8 - urllib3 >=2 license: Apache-2.0 AND MIT purls: - pkg:pypi/types-requests?source=hash-mapping - size: 26305 - timestamp: 1725694580160 + size: 26329 + timestamp: 1726293973214 - kind: conda name: typing-extensions version: 4.12.2 @@ -18354,33 +18481,32 @@ packages: timestamp: 1725600364789 - kind: conda name: tzdata - version: 2024a - build: h8827d51_1 - build_number: 1 + version: 2024b + build: hc8b5060_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - sha256: 7d21c95f61319dba9209ca17d1935e6128af4235a67ee4e57a00908a1450081e - md5: 8bfdead4e0fff0383ae4c9c50d0531bd + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf + md5: 8ac3367aafb1cc0a068483c580af8015 license: LicenseRef-Public-Domain purls: [] - size: 124164 - timestamp: 1724736371498 + size: 122354 + timestamp: 1728047496079 - kind: conda name: ucrt version: 10.0.22621.0 - build: h57928b3_0 + build: h57928b3_1 + build_number: 1 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - sha256: f29cdaf8712008f6b419b8b1a403923b00ab2504bfe0fb2ba8eb60e72d4f14c6 - md5: 72608f6cd3e5898229c3ea16deb1ac43 + url: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda + sha256: db8dead3dd30fb1a032737554ce91e2819b43496a0db09927edf01c32b577450 + md5: 6797b005cd0f439c4c5c9ac565783700 constrains: - vs2015_runtime >=14.29.30037 - license: LicenseRef-Proprietary - license_family: PROPRIETARY + license: LicenseRef-MicrosoftWindowsSDK10 purls: [] - size: 1283972 - timestamp: 1666630199266 + size: 559710 + timestamp: 1728377334097 - kind: conda name: ukkonen version: 1.0.1 @@ -18514,14 +18640,13 @@ packages: timestamp: 1715010035325 - kind: conda name: urllib3 - version: 2.2.2 - build: pyhd8ed1ab_1 - build_number: 1 + version: 2.2.3 + build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda - sha256: 00c47c602c03137e7396f904eccede8cc64cc6bad63ce1fc355125df8882a748 - md5: e804c43f58255e977093a2298e442bb8 + url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda + sha256: b6bb34ce41cd93956ad6eeee275ed52390fb3788d6c75e753172ea7ac60b66e5 + md5: 6b55867f385dd762ed99ea687af32a69 depends: - brotli-python >=1.0.9 - h2 >=4,<5 @@ -18532,53 +18657,53 @@ packages: license_family: MIT purls: - pkg:pypi/urllib3?source=hash-mapping - size: 95048 - timestamp: 1719391384778 + size: 98076 + timestamp: 1726496531769 - kind: conda name: vc version: '14.3' - build: h8a93ad2_21 - build_number: 21 + build: ha32ba9b_22 + build_number: 22 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_21.conda - sha256: f14f5238c2e2516e292af43d91df88f212d769b4853eb46d03291793dcf00da9 - md5: e632a9b865d4b653aa656c9fb4f4817c + url: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_22.conda + sha256: 
2a47c5bd8bec045959afada7063feacd074ad66b170c1ea92dd139b389fcf8fd + md5: 311c9ba1dfdd2895a8cb08346ff26259 depends: - - vc14_runtime >=14.40.33810 + - vc14_runtime >=14.38.33135 track_features: - vc14 license: BSD-3-Clause license_family: BSD purls: [] - size: 17243 - timestamp: 1725984095174 + size: 17447 + timestamp: 1728400826998 - kind: conda name: vc14_runtime version: 14.40.33810 - build: ha82c5b3_21 - build_number: 21 + build: hcc2c482_22 + build_number: 22 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-ha82c5b3_21.conda - sha256: c3bf51bff7db39ad7e890dbef1b1026df0af36975aea24dea7c5fe1e0b382c40 - md5: b3ebb670caf046e32b835fbda056c4f9 + url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_22.conda + sha256: 4c669c65007f88a7cdd560192f7e6d5679d191ac71610db724e18b2410964d64 + md5: ce23a4b980ee0556a118ed96550ff3f3 depends: - ucrt >=10.0.20348.0 constrains: - - vs2015_runtime 14.40.33810.* *_21 - license: LicenseRef-ProprietaryMicrosoft + - vs2015_runtime 14.40.33810.* *_22 + license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime license_family: Proprietary purls: [] - size: 751757 - timestamp: 1725984166774 + size: 750719 + timestamp: 1728401055788 - kind: conda name: virtualenv - version: 20.26.4 + version: 20.26.6 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda - sha256: 6eeb4f9e541f2e5198185c44ab4f5a2bdf700ca395b18617e12a8e00cf176d05 - md5: 14c15fa7def506fe7d1a0e3abdc212d6 + url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + sha256: 23128da47bc0b42b0fef0d41efc10d8ea1fb8232f0846bc4513eeba866f20d13 + md5: a7aa70aa30c47aeb84672621a85a4ef8 depends: - distlib <1,>=0.3.7 - filelock <4,>=3.12.2 @@ -18588,32 +18713,32 @@ packages: license_family: MIT purls: - pkg:pypi/virtualenv?source=hash-mapping - size: 4886907 - timestamp: 1725779361477 + size: 4875601 + 
timestamp: 1727513873376 - kind: conda name: vs2015_runtime version: 14.40.33810 - build: h3bf8584_21 - build_number: 21 + build: h3bf8584_22 + build_number: 22 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_21.conda - sha256: 472410455c381e406ec8c1d3e0342b48ee23122ef7ffb22a09d9763ca5df4d20 - md5: b3f37db7b7ae1c22600fa26a63ed99b3 + url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_22.conda + sha256: 80aa9932203d65a96f817b8be4fafc176fb2b3fe6cf6899ede678b8f0317fbff + md5: 8c6b061d44cafdfc8e8c6eb5f100caf0 depends: - vc14_runtime >=14.40.33810 license: BSD-3-Clause license_family: BSD purls: [] - size: 17241 - timestamp: 1725984096440 + size: 17453 + timestamp: 1728400827536 - kind: conda name: watchdog - version: 5.0.2 + version: 5.0.3 build: py312h2e8e312_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.2-py312h2e8e312_0.conda - sha256: 580d88e105f473d12d074a5c5367d2c6a59f9163b44cf1dea7e29bb18d70817c - md5: 15280b51a3c50c02fc2ac260ecf90454 + url: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.3-py312h2e8e312_0.conda + sha256: 7fab8f245cae8b08b40cf369b5a4a1fb113feef1ccf53577c21da8f1e8cbf819 + md5: 527fc6d1fe7b8685258d210d225f6ee6 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -18622,16 +18747,16 @@ packages: license_family: APACHE purls: - pkg:pypi/watchdog?source=hash-mapping - size: 166180 - timestamp: 1725821602917 + size: 166812 + timestamp: 1727641846254 - kind: conda name: watchdog - version: 5.0.2 + version: 5.0.3 build: py312h7900ff3_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.2-py312h7900ff3_0.conda - sha256: 1c99f785aa6740c9e9e566e766f5499d7b55c5a07d75a348ce955de3247fcf31 - md5: c478f125efbb575135a9280ebd4fa01c + url: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.3-py312h7900ff3_0.conda + sha256: 
cbe1b1dca16f81a0f8f8e71e75b4c3b4fbb1ef7838b239c0d453ee0083cf311b + md5: 852f2daaf2b8c249d1cda5001dbfecbe depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -18640,16 +18765,16 @@ packages: license_family: APACHE purls: - pkg:pypi/watchdog?source=hash-mapping - size: 140866 - timestamp: 1725821237546 + size: 142401 + timestamp: 1727641411235 - kind: conda name: watchdog - version: 5.0.2 + version: 5.0.3 build: py312hb553811_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.2-py312hb553811_0.conda - sha256: fc2fbd8c54a20fa4b021b772a62da3af9d5e3d9a6b84e4b083a0ffbd08ed9025 - md5: fd6318dc9fc957ec9145b49dd9c31b79 + url: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.3-py312hb553811_0.conda + sha256: 1049d638441f7a60eb6bec104337f230ab7223af0450d3fbacd48e67ebd119e4 + md5: 3557f603047004bdb0aa0c51866d3a56 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -18659,8 +18784,8 @@ packages: license_family: APACHE purls: - pkg:pypi/watchdog?source=hash-mapping - size: 149711 - timestamp: 1725821254325 + size: 150433 + timestamp: 1727641608876 - kind: conda name: wayland version: 1.23.1 @@ -18769,21 +18894,21 @@ packages: - kind: conda name: win_inet_pton version: 1.1.0 - build: pyhd8ed1ab_6 - build_number: 6 + build: pyh7428d3b_7 + build_number: 7 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2 - sha256: a11ae693a0645bf6c7b8a47bac030be9c0967d0b1924537b9ff7458e832c0511 - md5: 30878ecc4bd36e8deeea1e3c151b2e0b + url: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyh7428d3b_7.conda + sha256: c5297692ab34aade5e21107abaf623d6f93847662e25f655320038d2bfa1a812 + md5: c998c13b2f998af57c3b88c7a47979e0 depends: - __win - python >=3.6 - license: PUBLIC-DOMAIN + license: LicenseRef-Public-Domain purls: - pkg:pypi/win-inet-pton?source=hash-mapping - size: 8191 - timestamp: 1667051294134 + size: 9602 + timestamp: 1727796413384 - kind: 
conda name: winpty version: 0.4.3 @@ -18862,43 +18987,45 @@ packages: - kind: conda name: xarray version: 2024.9.0 - build: pyhd8ed1ab_0 + build: pyhd8ed1ab_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_0.conda - sha256: 416f009d6513f73ca2c02fbb65f626c1730b534741a752e74c9b2cd7b1f57edf - md5: 2cde8ed028a0fd8f35d7f9b44839d362 + url: https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda + sha256: 8bb5b522cdf1905d831a9b371a3a3bd2932a9f53398332fbd38ed3442015bbaf + md5: dc790d427d89b85ae12fc094e264833f depends: - numpy >=1.24 - packaging >=23.1 - pandas >=2.1 - python >=3.10 constrains: - - dask-core >=2023.9 - - flox >=0.7 - - numba >=0.57 - - h5py >=3.8 - - hdf5 >=1.12 - - netcdf4 >=1.6.0 - - scipy >=1.11 - - zarr >=2.16 - - sparse >=0.14 - - cftime >=1.6 - - iris >=3.7 - - seaborn >=0.12 + - seaborn-base >=0.12 - distributed >=2023.9 - - matplotlib-base >=3.7 - - pint >=0.22 + - scipy >=1.11 + - netcdf4 >=1.6.0 + - toolz >=0.12 - nc-time-axis >=1.4 - - bottleneck >=1.3 + - cftime >=1.6 - h5netcdf >=1.2 + - matplotlib-base >=3.7 + - h5py >=3.8 + - zarr >=2.16 + - hdf5 >=1.12 + - numba >=0.57 + - iris >=3.7 - cartopy >=0.22 - - toolz >=0.12 + - dask-core >=2023.9 + - flox >=0.7 + - bottleneck >=1.3 + - pint >=0.22 + - sparse >=0.14 license: Apache-2.0 + license_family: APACHE purls: - - pkg:pypi/xarray?source=compressed-mapping - size: 802366 - timestamp: 1726135055732 + - pkg:pypi/xarray?source=hash-mapping + size: 801066 + timestamp: 1728453306227 - kind: conda name: xcb-util version: 0.4.1 @@ -18910,7 +19037,7 @@ packages: md5: 8637c3e5821654d0edf97e2b0404b443 depends: - libgcc-ng >=12 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 license: MIT license_family: MIT purls: [] @@ -18928,7 +19055,7 @@ packages: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - libxcb >=1.13 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 - xcb-util-image >=0.4.0,<0.5.0a0 - 
xcb-util-renderutil >=0.3.10,<0.4.0a0 license: MIT @@ -18947,7 +19074,7 @@ packages: md5: a0901183f08b6c7107aab109733a3c91 depends: - libgcc-ng >=12 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 - xcb-util >=0.4.1,<0.5.0a0 license: MIT license_family: MIT @@ -18964,7 +19091,7 @@ packages: md5: ad748ccca349aec3e91743e08b5e2b50 depends: - libgcc-ng >=12 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 license: MIT license_family: MIT purls: [] @@ -18980,7 +19107,7 @@ packages: md5: 0e0cbe0564d03a99afd5fd7b362feecd depends: - libgcc-ng >=12 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 license: MIT license_family: MIT purls: [] @@ -18996,7 +19123,7 @@ packages: md5: 608e0ef8256b81d04456e8d211eee3e8 depends: - libgcc-ng >=12 - - libxcb >=1.16,<1.17.0a0 + - libxcb >=1.16,<2.0.0a0 license: MIT license_family: MIT purls: [] @@ -19005,448 +19132,433 @@ packages: - kind: conda name: xerces-c version: 3.2.5 - build: h666cd97_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h666cd97_1.conda - sha256: ae917685dc70a66800216343eef82f14a508cbad27e71d4caf17fcbda9e8b2d0 - md5: 97e8ef960a53cf08f2c4ceec8cf9e10d + build: h197e74d_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-h197e74d_2.conda + sha256: 6218762b3ecff8e365f2880bb6a762b195e350159510d3f2dba58fa53f90a1bf + md5: 559e2c3fb2fe4bfc985e8486bad8ecaa depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - icu >=75.1,<76.0a0 - - libcurl >=8.8.0,<9.0a0 - - libgcc-ng >=12 - - libnsl >=2.0.1,<2.1.0a0 - - libstdcxx-ng >=12 + - libcxx >=17 license: Apache-2.0 license_family: Apache purls: [] - size: 1631030 - timestamp: 1721031385061 + size: 1352475 + timestamp: 1727734320281 - kind: conda name: xerces-c version: 3.2.5 - build: he0c23c2_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_1.conda - sha256: 
d90517c4ea257096a021ccb42742607e9ee034492aba697db1095321a871a638 - md5: 0a0d85bb98ea8ffb9948afe5bcbd63f7 + build: h988505b_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda + sha256: 339ab0ff05170a295e59133cd0fa9a9c4ba32b6941c8a2a73484cc13f81e248a + md5: 9dda9667feba914e0e80b95b82f7402b depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __glibc >=2.17,<3.0.a0 + - icu >=75.1,<76.0a0 + - libgcc >=13 + - libnsl >=2.0.1,<2.1.0a0 + - libstdcxx >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 3547000 - timestamp: 1721032032254 + size: 1648243 + timestamp: 1727733890754 - kind: conda name: xerces-c version: 3.2.5 - build: hfb503d4_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/xerces-c-3.2.5-hfb503d4_1.conda - sha256: 58c07f66e7a9b6853bc25663ce83098ae0ef2dc8f8ac383b9e708d9cd1349813 - md5: 0a0c50f248ec412e3225e2683b49d6cb + build: he0c23c2_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.5-he0c23c2_2.conda + sha256: 759ae22a0a221dc1c0ba39684b0dcf696aab4132478e17e56a0366ded519e54e + md5: 82b6eac3c198271e98b48d52d79726d8 depends: - - __osx >=10.13 - - icu >=75.1,<76.0a0 - - libcurl >=8.8.0,<9.0a0 - - libcxx >=16 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 1348901 - timestamp: 1721031740491 + size: 3574017 + timestamp: 1727734520239 - kind: conda name: xkeyboard-config - version: '2.42' - build: h4ab18f5_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.42-h4ab18f5_0.conda - sha256: 240caab7d9d85154ef373ecbac3ff9fb424add2029dbb124e949c6cbab2996dd - md5: b193af204da1bfb8c13882d131a14bd2 - depends: - - libgcc-ng >=12 - - xorg-libx11 >=1.8.9,<2.0a0 - license: MIT - license_family: MIT - purls: [] - size: 388998 - timestamp: 
1717817668629 -- kind: conda - name: xorg-fixesproto - version: '5.0' - build: h7f98852_1002 - build_number: 1002 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2 - sha256: 5d2af1b40f82128221bace9466565eca87c97726bb80bbfcd03871813f3e1876 - md5: 65ad6e1eb4aed2b0611855aff05e04f6 - depends: - - libgcc-ng >=9.3.0 - - xorg-xextproto - license: MIT - license_family: MIT - purls: [] - size: 9122 - timestamp: 1617479697350 -- kind: conda - name: xorg-inputproto - version: 2.3.2 - build: h7f98852_1002 - build_number: 1002 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2 - sha256: 6c8c2803de0f643f8bad16ece3f9a7259e4a49247543239c182d66d5e3a129a7 - md5: bcd1b3396ec6960cbc1d2855a9e60b2b - depends: - - libgcc-ng >=9.3.0 - license: MIT - license_family: MIT - purls: [] - size: 19602 - timestamp: 1610027678228 -- kind: conda - name: xorg-kbproto - version: 1.0.7 - build: h7f98852_1002 - build_number: 1002 + version: '2.43' + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - sha256: e90b0a6a5d41776f11add74aa030f789faf4efd3875c31964d6f9cfa63a10dd1 - md5: 4b230e8381279d76131116660f5a241a + url: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda + sha256: 0d89b5873515a1f05d311f37ea4e087bbccc0418afa38f2f6189e97280db3179 + md5: f725c7425d6d7c15e31f3b99a88ea02f depends: - - libgcc-ng >=9.3.0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 license: MIT license_family: MIT purls: [] - size: 27338 - timestamp: 1610027759842 + size: 389475 + timestamp: 1727840188958 - kind: conda name: xorg-libice version: 1.1.1 - build: hd590300_0 + build: hb9d3cd8_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda - sha256: 
5aa9b3682285bb2bf1a8adc064cb63aff76ef9178769740d855abb42b0d24236 - md5: b462a33c0be1421532f28bfe8f4a7514 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda + sha256: ec276da68d1c4a3d34a63195b35ca5b248d4aff0812464dcd843d74649b5cec4 + md5: 19608a9656912805b2b9a2f6bd257b04 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 58469 - timestamp: 1685307573114 + size: 58159 + timestamp: 1727531850109 - kind: conda name: xorg-libsm version: 1.2.4 - build: h7391055_0 + build: he73a12e_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda - sha256: 089ad5f0453c604e18985480218a84b27009e9e6de9a0fa5f4a20b8778ede1f1 - md5: 93ee23f12bc2e684548181256edd2cf6 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda + sha256: 70e903370977d44c9120a5641ab563887bd48446e9ef6fc2a3f5f60531c2cd6c + md5: 05a8ea5f446de33006171a7afe6ae857 depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - libuuid >=2.38.1,<3.0a0 - xorg-libice >=1.1.1,<2.0a0 license: MIT license_family: MIT purls: [] - size: 27433 - timestamp: 1685453649160 + size: 27516 + timestamp: 1727634669421 - kind: conda name: xorg-libx11 - version: 1.8.9 - build: hb711507_1 - build_number: 1 + version: 1.8.10 + build: h4f16b4b_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-hb711507_1.conda - sha256: 66eabe62b66c1597c4a755dcd3f4ce2c78adaf7b32e25dfee45504d67d7735c1 - md5: 4a6d410296d7e39f00bacdee7df046e9 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda + sha256: c4650634607864630fb03696474a0535f6fce5fda7d81a6462346e071b53dfa7 + md5: 0b666058a179b744a622d0a4a0c56353 depends: - - libgcc-ng >=12 - - libxcb >=1.16,<1.17.0a0 - - xorg-kbproto - - xorg-xextproto >=7.3.0,<8.0a0 - - xorg-xproto + - __glibc >=2.17,<3.0.a0 + - libgcc 
>=13 + - libxcb >=1.17.0,<2.0a0 + - xorg-xorgproto license: MIT license_family: MIT purls: [] - size: 832198 - timestamp: 1718846846409 + size: 838308 + timestamp: 1727356837875 - kind: conda name: xorg-libxau version: 1.0.11 - build: h0dc2134_0 + build: h00291cd_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h0dc2134_0.conda - sha256: 8a2e398c4f06f10c64e69f56bcf3ddfa30b432201446a0893505e735b346619a - md5: 9566b4c29274125b0266d0177b5eb97b + url: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h00291cd_1.conda + sha256: 96177823ec38336b0f4b7e7c2413da61f8d008d800cc4a5b8ad21f9128fb7de0 + md5: c6cc91149a08402bbb313c5dc0142567 + depends: + - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 13071 - timestamp: 1684638167647 + size: 13176 + timestamp: 1727034772877 - kind: conda name: xorg-libxau version: 1.0.11 - build: hcd874cb_0 + build: h0e40799_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-hcd874cb_0.conda - sha256: 8c5b976e3b36001bdefdb41fb70415f9c07eff631f1f0155f3225a7649320e77 - md5: c46ba8712093cb0114404ae8a7582e1a + url: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-h0e40799_1.conda + sha256: f44bc6f568a9697b7e1eadc2d00ef5de0fe62efcf5e27e5ecc46f81046082faf + md5: ca66d6f8fe86dd53664e8de5087ef6b1 depends: - - m2w64-gcc-libs - - m2w64-gcc-libs-core + - libgcc >=13 + - libwinpthread >=12.0.0.r4.gg4f2fc60ca + - ucrt >=10.0.20348.0 license: MIT license_family: MIT purls: [] - size: 51297 - timestamp: 1684638355740 + size: 107925 + timestamp: 1727035280560 - kind: conda name: xorg-libxau version: 1.0.11 - build: hd590300_0 + build: hb9d3cd8_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - sha256: 309751371d525ce50af7c87811b435c176915239fc9e132b99a25d5e1703f2d4 - md5: 2c80dc38fface310c9bd81b17037fee5 + url: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda + sha256: 532a046fee0b3a402db867b6ec55c84ba4cdedb91d817147c8feeae9766be3d6 + md5: 77cbc488235ebbaab2b6e912d3934bae depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 14468 - timestamp: 1684637984591 + size: 14679 + timestamp: 1727034741045 - kind: conda - name: xorg-libxdmcp - version: 1.1.3 - build: h35c211d_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2 - sha256: 485421c16f03a01b8ed09984e0b2ababdbb3527e1abf354ff7646f8329be905f - md5: 86ac76d6bf1cbb9621943eb3bd9ae36e + name: xorg-libxcomposite + version: 0.4.6 + build: hb9d3cd8_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda + sha256: 753f73e990c33366a91fd42cc17a3d19bb9444b9ca5ff983605fa9e953baf57f + md5: d3c295b50f092ab525ffe3c2aa4b7413 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 license: MIT license_family: MIT purls: [] - size: 17225 - timestamp: 1610071995461 + size: 13603 + timestamp: 1727884600744 - kind: conda - name: xorg-libxdmcp - version: 1.1.3 - build: h7f98852_0 + name: xorg-libxcursor + version: 1.2.2 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - sha256: 4df7c5ee11b8686d3453e7f3f4aa20ceef441262b49860733066c52cfd0e4a77 - md5: be93aabceefa2fac576e971aef407908 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.2-hb9d3cd8_0.conda + sha256: 7262935568963836efd05e0c68d5c787246578465b7a66c8bd7f0ba361d6a105 + md5: bb2638cd7fbdd980b1cff9a99a6c1fa8 depends: - - libgcc-ng >=9.3.0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + - xorg-libxrender >=0.9.11,<0.10.0a0 
license: MIT license_family: MIT purls: [] - size: 19126 - timestamp: 1610071769228 + size: 31804 + timestamp: 1727796817007 +- kind: conda + name: xorg-libxdamage + version: 1.1.6 + build: hb9d3cd8_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda + sha256: 43b9772fd6582bf401846642c4635c47a9b0e36ca08116b3ec3df36ab96e0ec0 + md5: b5fcc7172d22516e1f965490e65e33a4 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + license: MIT + license_family: MIT + purls: [] + size: 13217 + timestamp: 1727891438799 - kind: conda name: xorg-libxdmcp - version: 1.1.3 - build: hcd874cb_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2 - sha256: f51205d33c07d744ec177243e5d9b874002910c731954f2c8da82459be462b93 - md5: 46878ebb6b9cbd8afcf8088d7ef00ece + version: 1.1.5 + build: h00291cd_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.5-h00291cd_0.conda + sha256: bb4d1ef9cafef535494adf9296130b6193b3a44375883185b5167de03eb1ac7f + md5: 9f438e1b6f4e73fd9e6d78bfe7c36743 depends: - - m2w64-gcc-libs + - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 67908 - timestamp: 1610072296570 + size: 18465 + timestamp: 1727794980957 - kind: conda - name: xorg-libxext - version: 1.3.4 - build: h0b41bf4_2 - build_number: 2 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda - sha256: 73e5cfbdff41ef8a844441f884412aa5a585a0f0632ec901da035a03e1fe1249 - md5: 82b6df12252e6f32402b96dacc656fec + name: xorg-libxdmcp + version: 1.1.5 + build: h0e40799_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.5-h0e40799_0.conda + sha256: 9075f98dcaa8e9957e4a3d9d30db05c7578a536950a31c200854c5c34e1edb2c + md5: 8393c0f7e7870b4eb45553326f81f0ff 
depends: - - libgcc-ng >=12 - - xorg-libx11 >=1.7.2,<2.0a0 - - xorg-xextproto + - libgcc >=13 + - libwinpthread >=12.0.0.r4.gg4f2fc60ca + - ucrt >=10.0.20348.0 license: MIT license_family: MIT purls: [] - size: 50143 - timestamp: 1677036907815 + size: 69920 + timestamp: 1727795651979 - kind: conda - name: xorg-libxfixes - version: 5.0.3 - build: h7f98852_1004 - build_number: 1004 + name: xorg-libxdmcp + version: 1.1.5 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2 - sha256: 1e426a1abb774ef1dcf741945ed5c42ad12ea2dc7aeed7682d293879c3e1e4c3 - md5: e9a21aa4d5e3e5f1aed71e8cefd46b6a + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda + sha256: 6b250f3e59db07c2514057944a3ea2044d6a8cdde8a47b6497c254520fade1ee + md5: 8035c64cb77ed555e3f150b7b3972480 depends: - - libgcc-ng >=9.3.0 - - xorg-fixesproto - - xorg-libx11 >=1.7.0,<2.0a0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 18145 - timestamp: 1617717802636 + size: 19901 + timestamp: 1727794976192 - kind: conda - name: xorg-libxi - version: 1.7.10 - build: h4bc722e_1 - build_number: 1 + name: xorg-libxext + version: 1.3.6 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda - sha256: e1416eb435e3d903bc658e3c637f0e87efd2dca290fe70daf29738b3a3d1f8ff - md5: 749baebe7e2ff3360630e069175e528b + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda + sha256: da5dc921c017c05f38a38bd75245017463104457b63a1ce633ed41f214159c14 + md5: febbab7d15033c913d53c7a2c102309d depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - xorg-inputproto - - xorg-libx11 >=1.8.9,<2.0a0 - - xorg-libxext 1.3.* - - xorg-libxext >=1.3.4,<2.0a0 - - xorg-libxfixes 5.0.* - - xorg-xextproto >=7.3.0,<8.0a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 license: MIT license_family: 
MIT purls: [] - size: 46794 - timestamp: 1722108216651 + size: 50060 + timestamp: 1727752228921 - kind: conda - name: xorg-libxrender - version: 0.9.11 - build: hd590300_0 + name: xorg-libxfixes + version: 6.0.1 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda - sha256: 26da4d1911473c965c32ce2b4ff7572349719eaacb88a066db8d968a4132c3f7 - md5: ed67c36f215b310412b2af935bf3e530 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda + sha256: 2fef37e660985794617716eb915865ce157004a4d567ed35ec16514960ae9271 + md5: 4bdb303603e9821baf5fe5fdff1dc8f8 depends: - - libgcc-ng >=12 - - xorg-libx11 >=1.8.6,<2.0a0 - - xorg-renderproto + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 license: MIT license_family: MIT purls: [] - size: 37770 - timestamp: 1688300707994 + size: 19575 + timestamp: 1727794961233 - kind: conda - name: xorg-libxtst - version: 1.2.5 - build: h4bc722e_0 + name: xorg-libxi + version: 1.8.2 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-h4bc722e_0.conda - sha256: 0139b52c3cbce57bfd1d120c41637bc239430faff4aa0445f58de0adf4c4b976 - md5: 185159d666308204eca00295599b0a5c + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + sha256: 1a724b47d98d7880f26da40e45f01728e7638e6ec69f35a3e11f92acd05f9e7a + md5: 17dcc85db3c7886650b8908b183d6876 depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - xorg-inputproto - - xorg-libx11 >=1.8.9,<2.0a0 - - xorg-libxext 1.3.* - - xorg-libxext >=1.3.4,<2.0a0 - - xorg-libxi 1.7.* - - xorg-libxi >=1.7.10,<2.0a0 - - xorg-recordproto + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 license: MIT license_family: MIT purls: [] - size: 32931 - timestamp: 1722575571554 + size: 47179 + timestamp: 1727799254088 - kind: conda - name: 
xorg-libxxf86vm - version: 1.1.5 - build: h4bc722e_1 - build_number: 1 + name: xorg-libxrandr + version: 1.5.4 + build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-h4bc722e_1.conda - sha256: 109d6b1931d1482faa0bf6de83c7e6d9ca36bbf9d36a00a05df4f63b82fce5c3 - md5: 0c90ad87101001080484b91bd9d2cdef + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda + sha256: ac0f037e0791a620a69980914a77cb6bb40308e26db11698029d6708f5aa8e0d + md5: 2de7f99d6581a4a7adbff607b5c278ca depends: - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - - xorg-libx11 >=1.8.9,<2.0a0 - - xorg-libxext >=1.3.4,<2.0a0 - - xorg-xextproto >=7.3.0,<8.0a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxrender >=0.9.11,<0.10.0a0 license: MIT license_family: MIT purls: [] - size: 18443 - timestamp: 1722110433983 + size: 29599 + timestamp: 1727794874300 - kind: conda - name: xorg-recordproto - version: 1.14.2 - build: h7f98852_1002 - build_number: 1002 + name: xorg-libxrender + version: 0.9.11 + build: hb9d3cd8_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-recordproto-1.14.2-h7f98852_1002.tar.bz2 - sha256: 4b91d48fed368c83eafd03891ebfd5bae0a03adc087ebea8a680ae22da99a85f - md5: 2f835e6c386e73c6faaddfe9eda67e98 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda + sha256: f1217e902c0b1d8bc5d3ce65e483ebf38b049c823c9117b7198cfb16bd2b9143 + md5: a7a49a8b85122b49214798321e2e96b4 depends: - - libgcc-ng >=9.3.0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-xorgproto license: MIT license_family: MIT purls: [] - size: 8014 - timestamp: 1621340029114 + size: 37780 + timestamp: 1727529943015 - kind: conda - name: xorg-renderproto - version: 0.11.1 - build: h7f98852_1002 - build_number: 1002 + name: xorg-libxtst + version: 1.2.5 + build: hb9d3cd8_3 + 
build_number: 3 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2 - sha256: 38942930f233d1898594dd9edf4b0c0786f3dbc12065a0c308634c37fd936034 - md5: 06feff3d2634e3097ce2fe681474b534 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + sha256: 752fdaac5d58ed863bbf685bb6f98092fe1a488ea8ebb7ed7b606ccfce08637a + md5: 7bbe9a0cc0df0ac5f5a8ad6d6a11af2f depends: - - libgcc-ng >=9.3.0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxi >=1.7.10,<2.0a0 license: MIT license_family: MIT purls: [] - size: 9621 - timestamp: 1614866326326 + size: 32808 + timestamp: 1727964811275 - kind: conda - name: xorg-xextproto - version: 7.3.0 - build: h0b41bf4_1003 - build_number: 1003 + name: xorg-libxxf86vm + version: 1.1.5 + build: hb9d3cd8_3 + build_number: 3 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - sha256: b8dda3b560e8a7830fe23be1c58cc41f407b2e20ae2f3b6901eb5842ba62b743 - md5: bce9f945da8ad2ae9b1d7165a64d0f87 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_3.conda + sha256: c84404835e6f7985faa645a333bd17c6259a2b1627177db471010db9308b5d52 + md5: 2159fc3619590b4f62473b6b9631549f depends: - - libgcc-ng >=12 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 license: MIT license_family: MIT purls: [] - size: 30270 - timestamp: 1677036833037 + size: 17943 + timestamp: 1727956927910 - kind: conda - name: xorg-xproto - version: 7.0.31 - build: h7f98852_1007 - build_number: 1007 + name: xorg-xorgproto + version: '2024.1' + build: hb9d3cd8_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - sha256: f197bb742a17c78234c24605ad1fe2d88b1d25f332b75d73e5ba8cf8fbc2a10d - md5: 
b4a4381d54784606820704f7b5f05a15 + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda + sha256: 1316680be6edddee0156b86ec1102fc8286f51c1a5440366ed1db596a2dc3731 + md5: 7c21106b851ec72c037b162c216d8f05 depends: - - libgcc-ng >=9.3.0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 74922 - timestamp: 1607291557628 + size: 565425 + timestamp: 1726846388217 - kind: conda name: xugrid version: 0.12.1 @@ -19582,12 +19694,12 @@ packages: - kind: conda name: zeromq version: 4.3.5 - build: ha4adb4c_5 - build_number: 5 + build: h3b0a872_6 + build_number: 6 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda - sha256: dd48adc07fcd029c86fbf82e68d0e4818c7744b768e08139379920b56b582814 - md5: e8372041ebb377237db9d0d24c7b5962 + url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_6.conda + sha256: e67288b1c98a31ee58a5c07bdd873dbe08e75f752e1ad605d5e8c0697339903e + md5: 113506c8d2d558e733f5c38f6bf08c50 depends: - __glibc >=2.17,<3.0.a0 - krb5 >=1.21.3,<1.22.0a0 @@ -19597,47 +19709,47 @@ packages: license: MPL-2.0 license_family: MOZILLA purls: [] - size: 353159 - timestamp: 1725429777124 + size: 335528 + timestamp: 1728364029042 - kind: conda name: zeromq version: 4.3.5 - build: hb33e954_5 - build_number: 5 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-hb33e954_5.conda - sha256: 7e63a9ec19660666095ea9332a5b226329ff4f499018e8a281d0d160cbb60ca4 - md5: a9735eb372d515c78f8211785406e36f + build: ha9f60a1_6 + build_number: 6 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-ha9f60a1_6.conda + sha256: c37130692742cc43eedf4e23270c7d1634235acff50760025e9583f8b46b64e6 + md5: 33a78bbc44d6550c361abb058a0556e2 depends: - - __osx >=10.13 - krb5 >=1.21.3,<1.22.0a0 - - libcxx >=17 - libsodium >=1.0.20,<1.0.21.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime 
>=14.29.30139 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 303596 - timestamp: 1725430161260 + size: 2701749 + timestamp: 1728364260886 - kind: conda name: zeromq version: 4.3.5 - build: he1f189c_5 - build_number: 5 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-he1f189c_5.conda - sha256: 7cfea95cc9f637ad5b651cde6bb22ddcd7989bd9b21e3c6df4958f618c13b807 - md5: a6df1c5da1f16f02e872994611dc4dfb + build: he4ceba3_6 + build_number: 6 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/zeromq-4.3.5-he4ceba3_6.conda + sha256: 0e2a6ced111fd99b66b76ec797804ab798ec190a88a2779060f7a8787c343ee0 + md5: 00ec9f2a5e21bbbd22ffbbc12b3df286 depends: + - __osx >=10.13 - krb5 >=1.21.3,<1.22.0a0 + - libcxx >=17 - libsodium >=1.0.20,<1.0.21.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MPL-2.0 license_family: MOZILLA purls: [] - size: 2710711 - timestamp: 1725430044838 + size: 290634 + timestamp: 1728364170966 - kind: conda name: zict version: 3.0.0 @@ -19657,74 +19769,75 @@ packages: timestamp: 1681770298596 - kind: conda name: zipp - version: 3.20.1 + version: 3.20.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda - sha256: 30762bd25b6fc8714d5520a223ccf20ad4a6792dc439c54b59bf44b60bf51e72 - md5: 74a4befb4b38897e19a107693e49da20 + url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + sha256: 1e84fcfa41e0afdd87ff41e6fbb719c96a0e098c1f79be342293ab0bd8dea322 + md5: 4daaed111c05672ae669f7036ee5bba3 depends: - python >=3.8 license: MIT license_family: MIT purls: - pkg:pypi/zipp?source=hash-mapping - size: 21110 - timestamp: 1724731063145 + size: 21409 + timestamp: 1726248679175 - kind: conda name: zlib version: 1.3.1 - build: h2466b09_1 - build_number: 1 + build: h2466b09_2 + build_number: 2 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_1.conda - sha256: 76409556e6c7cb91991cd94d7fc853c9272c2872bd7e3573ff35eb33d6fca5be - md5: f8e0a35bf6df768ad87ed7bbbc36ab04 + url: https://conda.anaconda.org/conda-forge/win-64/zlib-1.3.1-h2466b09_2.conda + sha256: 8c688797ba23b9ab50cef404eca4d004a948941b6ee533ead0ff3bf52012528c + md5: be60c4e8efa55fddc17b4131aa47acbd depends: - - libzlib 1.3.1 h2466b09_1 + - libzlib 1.3.1 h2466b09_2 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Zlib license_family: Other purls: [] - size: 108081 - timestamp: 1716874767420 + size: 107439 + timestamp: 1727963788936 - kind: conda name: zlib version: 1.3.1 - build: h4ab18f5_1 - build_number: 1 + build: hb9d3cd8_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda - sha256: cee16ab07a11303de721915f0a269e8c7a54a5c834aa52f74b1cc3a59000ade8 - md5: 9653f1bf3766164d0e65fa723cabbc54 + url: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda + sha256: 5d7c0e5f0005f74112a34a7425179f4eb6e73c92f5d109e6af4ddeca407c92ab + md5: c9f075ab2f33b3bbee9e62d4ad0a6cd8 depends: - - libgcc-ng >=12 - - libzlib 1.3.1 h4ab18f5_1 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libzlib 1.3.1 hb9d3cd8_2 license: Zlib license_family: Other purls: [] - size: 93004 - timestamp: 1716874213487 + size: 92286 + timestamp: 1727963153079 - kind: conda name: zlib version: 1.3.1 - build: h87427d6_1 - build_number: 1 + build: hd23fc13_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-h87427d6_1.conda - sha256: 41bd5fef28b2755d637e3a8ea5c84010628392fbcf80c7e3d7370aaced7ee4fe - md5: 3ac9ef8975965f9698dbedd2a4cc5894 + url: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-hd23fc13_2.conda + sha256: 219edbdfe7f073564375819732cbf7cc0d7c7c18d3f546a09c2dfaf26e4d69f3 + md5: c989e0295dcbdc08106fe5d9e935f0b9 depends: - __osx >=10.13 - - libzlib 
1.3.1 h87427d6_1 + - libzlib 1.3.1 hd23fc13_2 license: Zlib license_family: Other purls: [] - size: 88782 - timestamp: 1716874245467 + size: 88544 + timestamp: 1727963189976 - kind: conda name: zstandard version: 0.23.0 diff --git a/pixi.toml b/pixi.toml index 39ce490..ee0682c 100644 --- a/pixi.toml +++ b/pixi.toml @@ -80,7 +80,7 @@ ribasim_nl = { path = "src/ribasim_nl", editable = true } ribasim = { path = "../Ribasim/python/ribasim", editable = true } [feature.prod.dependencies] -ribasim = "==2024.10.0" +ribasim = "==2024.11.0" [environments] default = { features = ["common", "prod"] } From 71b67e6eecbf783ea25fc91c12c9744db78ad3e0 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Fri, 11 Oct 2024 12:36:26 +0200 Subject: [PATCH 10/23] Afbouwen wdod vechtstromen (#163) Alle verbeteringen aan model-klasse voor het debuggen van Vechtstromen en WDOD --------- Co-authored-by: Martijn Visser --- notebooks/basin_area_nodes.py | 28 + notebooks/de_dommel/00_get_model.py | 4 +- notebooks/de_dommel/00_get_verwerkt.py | 4 +- .../drents_overijsselse_delta/00_get_model.py | 15 + .../00_get_verwerkt.py | 9 + .../01_fix_model_network.py | 403 +++++++ notebooks/vechtstromen/00_get_model.py | 15 + notebooks/vechtstromen/00_get_verwerkt.py | 9 + .../vechtstromen/01_fix_model_network.py | 1028 +++++++++++++++++ notebooks/vechtstromen/99_upload_model.py | 6 + src/ribasim_nl/ribasim_nl/geometry.py | 49 + src/ribasim_nl/ribasim_nl/model.py | 161 ++- .../ribasim_nl/network_validator.py | 26 + 13 files changed, 1744 insertions(+), 13 deletions(-) create mode 100644 notebooks/basin_area_nodes.py create mode 100644 notebooks/drents_overijsselse_delta/00_get_model.py create mode 100644 notebooks/drents_overijsselse_delta/00_get_verwerkt.py create mode 100644 notebooks/drents_overijsselse_delta/01_fix_model_network.py create mode 100644 notebooks/vechtstromen/00_get_model.py create mode 100644 notebooks/vechtstromen/00_get_verwerkt.py create mode 100644 
notebooks/vechtstromen/01_fix_model_network.py create mode 100644 notebooks/vechtstromen/99_upload_model.py diff --git a/notebooks/basin_area_nodes.py b/notebooks/basin_area_nodes.py new file mode 100644 index 0000000..9f809b0 --- /dev/null +++ b/notebooks/basin_area_nodes.py @@ -0,0 +1,28 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model + +cloud = CloudStorage() + + +# %% +data = [] +for authority in cloud.water_authorities: + ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") + if ribasim_toml.exists(): + model = Model.read(ribasim_toml) + data += [ + { + "waterschap": authority, + "basin_nodes": len(model.basin.node.df), + "basin_areas": len(model.basin.area.df), + "basin_verschil": abs(len(model.basin.node.df) - len(model.basin.area.df)), + "basin_area_lt_5000m2": len(model.basin.area.df[model.basin.area.df.area < 5000]), + } + ] + +df = pd.DataFrame(data) + + +df.to_excel(cloud.joinpath("verschil_basins.xlsx"), index=False) diff --git a/notebooks/de_dommel/00_get_model.py b/notebooks/de_dommel/00_get_model.py index 56538ad..12e044a 100644 --- a/notebooks/de_dommel/00_get_model.py +++ b/notebooks/de_dommel/00_get_model.py @@ -3,7 +3,7 @@ cloud = CloudStorage() -dommel_url = cloud.joinurl("DeDommel", "modellen", "DeDommel_2024_6_3") +model_url = cloud.joinurl("DeDommel", "modellen", "DeDommel_2024_6_3") # %% -cloud.download_content(dommel_url) +cloud.download_content(model_url) diff --git a/notebooks/de_dommel/00_get_verwerkt.py b/notebooks/de_dommel/00_get_verwerkt.py index f6d7d10..5cdc8b3 100644 --- a/notebooks/de_dommel/00_get_verwerkt.py +++ b/notebooks/de_dommel/00_get_verwerkt.py @@ -3,7 +3,7 @@ cloud = CloudStorage() -dommel_url = cloud.joinurl("DeDommel", "verwerkt") +data_url = cloud.joinurl("DeDommel", "verwerkt") # %% -cloud.download_content(dommel_url) +cloud.download_content(data_url) diff --git a/notebooks/drents_overijsselse_delta/00_get_model.py 
b/notebooks/drents_overijsselse_delta/00_get_model.py new file mode 100644 index 0000000..2cf928b --- /dev/null +++ b/notebooks/drents_overijsselse_delta/00_get_model.py @@ -0,0 +1,15 @@ +# %% +from ribasim_nl import CloudStorage + +authority = "DrentsOverijsselseDelta" +short_name = "dod" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") + +cloud.download_content(model_url) + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if ribasim_toml.exists(): + ribasim_toml.rename(ribasim_toml.with_name(f"{short_name}.toml")) diff --git a/notebooks/drents_overijsselse_delta/00_get_verwerkt.py b/notebooks/drents_overijsselse_delta/00_get_verwerkt.py new file mode 100644 index 0000000..5cdc8b3 --- /dev/null +++ b/notebooks/drents_overijsselse_delta/00_get_verwerkt.py @@ -0,0 +1,9 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +data_url = cloud.joinurl("DeDommel", "verwerkt") + +# %% +cloud.download_content(data_url) diff --git a/notebooks/drents_overijsselse_delta/01_fix_model_network.py b/notebooks/drents_overijsselse_delta/01_fix_model_network.py new file mode 100644 index 0000000..fd1aa83 --- /dev/null +++ b/notebooks/drents_overijsselse_delta/01_fix_model_network.py @@ -0,0 +1,403 @@ +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin, level_boundary, manning_resistance, outlet +from ribasim_nl import CloudStorage, Model, NetworkValidator +from ribasim_nl.geometry import split_basin_multi_polygon + +cloud = CloudStorage() + +authority = "DrentsOverijsselseDelta" +short_name = "dod" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") +hydroobject_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject", 
fid_as_index=True +) +duikersifonhevel_gdf = gpd.read_file( + cloud.joinpath(authority, "aangeleverd", "Aanlevering_202311", "HyDAMO_WM_20231117.gpkg"), + fid_as_index=True, + layer="duikersifonhevel", +) + +split_line_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True +) + +# level_boundary_gdf = gpd.read_file( +# cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="level_boundary", fid_as_index=True +# ) + +# %% read model +model = Model.read(ribasim_toml) +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_fix_model_network", f"{short_name}.toml") +network_validator = NetworkValidator(model) + +# %% some stuff we'll need again +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +level_data = level_boundary.Static(level=[0]) + +basin_data = [ + basin.Profile(level=[0.0, 1.0], area=[0.01, 1000.0]), + basin.Static( + drainage=[0.0], + potential_evaporation=[0.001 / 86400], + infiltration=[0.0], + precipitation=[0.005 / 86400], + ), + basin.State(level=[0]), +] +outlet_data = outlet.Static(flow_rate=[100]) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2393424844 +# Verwijderen duplicate edges + +model.edge.df.drop_duplicates(inplace=True) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2393458802 + +# Toevoegen ontbrekende basins (oplossen topologie) +model.remove_node(7, remove_edges=True) +model.remove_node(84, remove_edges=True) +basin_edges_df = network_validator.edge_incorrect_connectivity() +basin_nodes_df = network_validator.node_invalid_connectivity() + +for row in basin_nodes_df.itertuples(): + # maak basin-node + basin_node = model.basin.add(Node(geometry=row.geometry), tables=basin_data) + + # update edge_table + model.edge.df.loc[basin_edges_df[basin_edges_df.from_node_id == row.node_id].index, ["from_node_id"]] = ( + 
basin_node.node_id + ) + model.edge.df.loc[basin_edges_df[basin_edges_df.to_node_id == row.node_id].index, ["to_node_id"]] = ( + basin_node.node_id + ) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2393672367 + +# Omdraaien edge-richting rondom outlets (inlaten/uitlaten) +# for edge_id in [2282, ] + +# https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2393731749 + +# Opruimen Reeve + +# basin 2484 wordt LevelBoundary (IJssel) +model.update_node(2484, "LevelBoundary", data=[level_data]) + +# nodes 1536, 762, 761, 1486 + aangesloten edges gooien we weg +for node_id in [1536, 762, 761, 1486]: + model.remove_node(node_id, remove_edges=True) + +# edges 2841, 2842, 2843, 2846 gooien we weg +model.remove_edges([2841, 2842, 2843, 2846]) + +# duiker 286309 voegen we toe +kdu = duikersifonhevel_gdf.loc[9063] +outlet_node = model.outlet.add( + Node(geometry=kdu.geometry.interpolate(0.5, normalized=True), name=f"duikersifonhevel.{kdu.objectid}"), + tables=[outlet_data], +) + +model.edge.add(model.level_boundary[10], outlet_node) +model.edge.add(outlet_node, model.basin[2240]) +model.edge.add(model.manning_resistance[849], model.basin[2240]) +model.edge.add(model.manning_resistance[760], model.basin[2240]) +model.edge.add(model.tabulated_rating_curve[187], model.basin[2240]) +model.edge.add(model.basin[2240], model.pump[1100]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2393871075 + +# Ramsgeul bij Ramspol +for node_id in [81, 839]: + model.remove_node(node_id, remove_edges=True) + +model.update_node(83, "Basin", data=basin_data) +model.move_node(83, hydroobject_gdf.at[21045, "geometry"].boundary.geoms[0]) +model.reverse_edge(edge_id=3013) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2399104407 + +# Deelstroomgebied Frysland + +# Nieuwe Kanaal / Tussen Linde voorzien van basin nabij gemaal.20 (node_id 701) +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[19671, 
"geometry"].boundary.geoms[1]), tables=basin_data) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[19671, "geometry"].interpolate(0.5, normalized=True)), + tables=[outlet_data], +) + +# basin 1623 verbinden met inlaatduikers (3x) en gemaal 1623; overige verbindingen verwijderen. +model.remove_edges([3038, 3040, 3037, 3041, 3039]) + +# nw basin verbinden met gemaal 20, level boundary 94 en alle inlaatduikers +model.reverse_edge(edge_id=2282) +model.edge.add(outlet_node, model.level_boundary[94]) +model.edge.add(basin_node, outlet_node) +model.edge.add(basin_node, model.manning_resistance[1182]) +model.edge.add(basin_node, model.manning_resistance[969]) +model.edge.add(basin_node, model.manning_resistance[1050]) +model.edge.add(basin_node, model.outlet[539]) +model.edge.add(model.pump[701], basin_node) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2399209787 + +# Aansluiten NW boezem op Fryslan + +# basin /area 1681 op te knippen nabij basin 1717 (rode lijn) +model.split_basin(split_line_gdf.at[14, "geometry"]) +model.basin.area.df = model.basin.area.df[model.basin.area.df.node_id != 1717] + +# basin 1682 te veranderen in een LevelBoundary +model.update_node(1682, "LevelBoundary", [level_data]) + +# Alle edges die nu naar basin 1717 lopen naar LevelBoundary 1682 of opheffen +model.remove_node(27, remove_edges=True) +model.remove_node(1556, remove_edges=True) +model.remove_edges([945, 2537, 2536]) + +boundary_node = model.level_boundary.add(Node(geometry=hydroobject_gdf.at[7778, "geometry"].boundary.geoms[0])) + +model.edge.add(model.pump[642], boundary_node) +model.update_node(1202, "Outlet", data=[outlet_data]) +model.edge.add(boundary_node, model.outlet[1202]) +model.update_node(1203, "Outlet", data=[outlet_data]) +model.edge.add(boundary_node, model.outlet[1203]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2399328441 + +# Misc pump benedenstroomse edges +for edge_id in [2862, 3006, 3049]: + 
model.reverse_edge(edge_id=edge_id) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2399355028 + +# Misc tabulated_rating_curve (stuwen) stroomrichting +for edge_id in [1884, 2197]: + model.reverse_edge(edge_id=edge_id) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2399382478 + +# Misc manning_resistance (duikers) stroomrichting +for edge_id in [1081, 518]: + model.reverse_edge(edge_id=edge_id) + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2399425885 +# Opknippen NW boezem + +poly1, poly2 = split_basin_multi_polygon(model.basin.area.df.at[598, "geometry"], split_line_gdf.at[15, "geometry"]) +model.basin.area.df.loc[model.basin.area.df.node_id == 1681, ["geometry"]] = poly1 + +poly1, poly2 = split_basin_multi_polygon(poly2, split_line_gdf.at[16, "geometry"]) +model.basin.area.df.loc[598] = {"node_id": 1686, "geometry": poly1} + +poly1, poly2 = split_basin_multi_polygon(poly2, split_line_gdf.at[17, "geometry"]) +model.basin.area.df.loc[model.basin.area.df.index.max() + 1] = {"geometry": poly1, "node_id": 1695} +model.basin.area.df.crs = model.crs + +tables = basin_data + [basin.Area(geometry=[poly2])] +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[19608, "geometry"].boundary.geoms[1]), tables=tables) + + +model.move_node(1686, hydroobject_gdf.at[19566, "geometry"].boundary.geoms[1]) +model.merge_basins(basin_id=2426, to_basin_id=1696, are_connected=True) +model.merge_basins(basin_id=2460, to_basin_id=1696, are_connected=True) +model.merge_basins(basin_id=1648, to_basin_id=1696, are_connected=True) + +model.merge_basins(basin_id=1696, to_basin_id=2453, are_connected=True) + +model.merge_basins(basin_id=2453, to_basin_id=1686, are_connected=True) +model.merge_basins(basin_id=1719, to_basin_id=1686, are_connected=True) +model.merge_basins(basin_id=1858, to_basin_id=1686, are_connected=True) + +model.remove_node(1532, remove_edges=True) +model.remove_node(722, remove_edges=True) 
+model.remove_node(536, remove_edges=True) +model.remove_node(2506, remove_edges=True) + +edge_ids = [ + 2866, + 2867, + 2868, + 2869, + 2870, + 2871, + 2872, + 2873, + 2875, + 2876, + 2877, + 2878, + 2879, + 2880, + 2881, + 2883, + 2885, + 2886, + 2889, + 2890, + 2891, + 2895, + 2897, + 2899, + 2901, + 2902, + 2903, + 2905, + 2906, + 2907, + 2908, + 2910, + 2911, + 2912, + 2913, + 2915, + 2916, + 2918, +] + +for edge_id in edge_ids: + model.redirect_edge(edge_id, to_node_id=basin_node.node_id) + +model.remove_edges([2887, 2892]) +model.edge.add(basin_node, model.pump[547]) +model.edge.add(basin_node, model.outlet[540]) + +for edge_id in [2914, 2894]: + model.redirect_edge(edge_id, to_node_id=31) + +model.redirect_edge(461, to_node_id=1585) + +model.basin.area.df.loc[model.basin.area.df.node_id == 1545, ["node_id"]] = 1585 + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2400050483 + +# Ontbrekende basin beneden-Vecht +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[12057, "geometry"].boundary.geoms[0]), tables=basin_data) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[12057, "geometry"].interpolate(0.5, normalized=True)), tables=[outlet_data] +) + +for edge_id in [2956, 2957, 2958, 2959, 2960, 2961]: + model.redirect_edge(edge_id, to_node_id=basin_node.node_id) + +model.remove_node(76, remove_edges=True) +model.edge.add(basin_node, model.pump[598]) +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[50]) + + +# %% https://github.com/Deltares/Ribasim-NL/issues/147#issuecomment-2399931763 + +# Samenvoegen Westerveldse Aa +model.merge_basins(basin_id=1592, to_basin_id=1645, are_connected=True) +model.merge_basins(basin_id=1593, to_basin_id=1645, are_connected=True) + +model.merge_basins(basin_id=1645, to_basin_id=1585, are_connected=True) +model.merge_basins(basin_id=2567, to_basin_id=1585, are_connected=True) +model.merge_basins(basin_id=2303, to_basin_id=1585, 
are_connected=True) +model.merge_basins(basin_id=2549, to_basin_id=1585, are_connected=True) +model.merge_basins(basin_id=2568, to_basin_id=1585, are_connected=True) +model.merge_basins(basin_id=2572, to_basin_id=1585, are_connected=True) +model.merge_basins(basin_id=2374, to_basin_id=1585, are_connected=True) + +model.merge_basins(basin_id=2559, to_basin_id=2337, are_connected=False) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2382572457 + +# Administratie basin node_id in node_table en Basin / Area correct maken +# model.fix_unassigned_basin_area() +# model.fix_unassigned_basin_area(method="closest", distance=100) +# model.fix_unassigned_basin_area() + +# model.unassigned_basin_area.to_file("unassigned_basins.gpkg") +# model.basin.area.df = model.basin.area.df[~model.basin.area.df.node_id.isin(model.unassigned_basin_area.node_id)] + +# %% +# corrigeren knoop-topologie + +# ManningResistance bovenstrooms LevelBoundary naar Outlet +# for row in network_validator.edge_incorrect_type_connectivity().itertuples(): +# model.update_node(row.from_node_id, "Outlet", data=[outlet_data]) + +# # Inlaten van ManningResistance naar Outlet +# for row in network_validator.edge_incorrect_type_connectivity( +# from_node_type="LevelBoundary", to_node_type="ManningResistance" +# ).itertuples(): +# model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) + + +# # buffer out small slivers +# model.basin.area.df.loc[:, ["geometry"]] = ( +# model.basin.area.df.buffer(0.1) +# .buffer(-0.1) +# .apply(lambda x: x if x.geom_type == "MultiPolygon" else MultiPolygon([x])) +# ) +# %% +# basin-profielen updaten + +df = pd.DataFrame( + { + "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), + "level": [0.0, 1.0] * len(model.basin.node.df), + "area": [0.01, 1000.0] * len(model.basin.node.df), + } +) +df.index.name = "fid" +model.basin.profile.df = df + +df = model.basin.profile.df.groupby("node_id")[["level"]].max().reset_index() +df.index.name = 
"fid" +model.basin.state.df = df + +# %% +# tabulated_rating_curves updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.tabulated_rating_curve.node.df.index.to_numpy(), 2), + "level": [0.0, 5] * len(model.tabulated_rating_curve.node.df), + "flow_rate": [0, 0.1] * len(model.tabulated_rating_curve.node.df), + } +) +df.index.name = "fid" +model.tabulated_rating_curve.static.df = df + + +# %% + +# level_boundaries updaten +df = pd.DataFrame( + { + "node_id": model.level_boundary.node.df.index.to_list(), + "level": [0.0] * len(model.level_boundary.node.df), + } +) +df.index.name = "fid" +model.level_boundary.static.df = df + +# %% +# manning_resistance updaten +length = len(model.manning_resistance.node.df) +df = pd.DataFrame( + { + "node_id": model.manning_resistance.node.df.index.to_list(), + "length": [100.0] * length, + "manning_n": [100.0] * length, + "profile_width": [100.0] * length, + "profile_slope": [100.0] * length, + } +) +df.index.name = "fid" +model.manning_resistance.static.df = df + + +# %% write model +model.use_validation = False +model.write(ribasim_toml) + +# %% diff --git a/notebooks/vechtstromen/00_get_model.py b/notebooks/vechtstromen/00_get_model.py new file mode 100644 index 0000000..6964055 --- /dev/null +++ b/notebooks/vechtstromen/00_get_model.py @@ -0,0 +1,15 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +model_url = cloud.joinurl("Vechtstromen", "modellen", "Vechtstromen_2024_6_3") + +# %% +cloud.download_content(model_url) + + +# %% rename, so we can seperate in QGIS +ribasim_toml = cloud.joinpath("Vechtstromen", "modellen", "Vechtstromen_2024_6_3", "model.toml") +if ribasim_toml.exists(): + ribasim_toml.rename(ribasim_toml.with_name("vechtstromen.toml")) diff --git a/notebooks/vechtstromen/00_get_verwerkt.py b/notebooks/vechtstromen/00_get_verwerkt.py new file mode 100644 index 0000000..5cdc8b3 --- /dev/null +++ b/notebooks/vechtstromen/00_get_verwerkt.py @@ -0,0 +1,9 @@ +# %% +from ribasim_nl import 
CloudStorage + +cloud = CloudStorage() + +data_url = cloud.joinurl("DeDommel", "verwerkt") + +# %% +cloud.download_content(data_url) diff --git a/notebooks/vechtstromen/01_fix_model_network.py b/notebooks/vechtstromen/01_fix_model_network.py new file mode 100644 index 0000000..7d94912 --- /dev/null +++ b/notebooks/vechtstromen/01_fix_model_network.py @@ -0,0 +1,1028 @@ +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin, level_boundary, manning_resistance, outlet +from ribasim_nl import CloudStorage, Model, NetworkValidator +from ribasim_nl.geometry import edge, split_basin, split_basin_multi_polygon +from shapely.geometry import LineString, MultiPolygon, Point, Polygon +from shapely.ops import nearest_points + +cloud = CloudStorage() + +ribasim_toml = cloud.joinpath("Vechtstromen", "modellen", "Vechtstromen_2024_6_3", "vechtstromen.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") +hydroobject_gdf = gpd.read_file( + cloud.joinpath("Vechtstromen", "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject", fid_as_index=True +) + +split_line_gdf = gpd.read_file( + cloud.joinpath("Vechtstromen", "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True +) + +level_boundary_gdf = gpd.read_file( + cloud.joinpath("Vechtstromen", "verwerkt", "fix_user_data.gpkg"), layer="level_boundary", fid_as_index=True +) + +# %% read model +model = Model.read(ribasim_toml) +ribasim_toml = cloud.joinpath("Vechtstromen", "modellen", "Vechtstromen_fix_model_network", "vechtstromen.toml") +network_validator = NetworkValidator(model) + +# %% some stuff we'll need again +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +level_data = level_boundary.Static(level=[0]) + +basin_data = [ + basin.Profile(level=[0.0, 1.0], area=[0.01, 1000.0]), + basin.Static( + drainage=[0.0], + potential_evaporation=[0.001 / 86400], + 
infiltration=[0.0], + precipitation=[0.005 / 86400], + ), + basin.State(level=[0]), +] +outlet_data = outlet.Static(flow_rate=[100]) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2385111465 + +# Verwijderen duplicate edges + +model.edge.df.drop_duplicates(inplace=True) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2352686763 + +# Toevoegen benedenstroomse randvoorwaarden Beneden Dinkel + +# verander basin met node_id 2250 naar type level_boundary +model.update_node(2250, "LevelBoundary", data=[level_data]) + + +# verplaats basin 1375 naar het hydroobject +node_id = 1375 + +model.basin.node.df.loc[node_id, "geometry"] = hydroobject_gdf.at[3135, "geometry"].interpolate(0.5, normalized=True) +edge_ids = model.edge.df[ + (model.edge.df.from_node_id == node_id) | (model.edge.df.to_node_id == node_id) +].index.to_list() +model.reset_edge_geometry(edge_ids=edge_ids) + +# verplaats basin 1375 naar het hydroobject + + +# verbind basins met level_boundaries +for fid, node_id in [(1, 1375), (2, 1624)]: + boundary_node_geometry = level_boundary_gdf.at[fid, "geometry"] + + # line for interpolation + basin_node_geometry = Point( + model.basin.node.df.at[node_id, "geometry"].x, model.basin.node.df.at[node_id, "geometry"].y + ) + line_geometry = LineString((basin_node_geometry, boundary_node_geometry)) + + # define level_boundary_node + boundary_node = model.level_boundary.add(Node(geometry=boundary_node_geometry), tables=[level_data]) + level_node = model.level_boundary.add(Node(geometry=boundary_node_geometry), tables=[level_data]) + + # define manning_resistance_node + outlet_node_geometry = line_geometry.interpolate(line_geometry.length - 20) + outlet_node = model.outlet.add(Node(geometry=outlet_node_geometry), tables=[outlet_data]) + + from_node_id = model.basin[node_id].node_id + to_node_id = outlet_node.node_id + + # draw edges + # FIXME: we force edges to be z-less untill this is solved: 
https://github.com/Deltares/Ribasim/issues/1854 + model.edge.add( + model.basin[node_id], outlet_node, geometry=edge(model.basin[node_id].geometry, outlet_node.geometry) + ) + model.edge.add(outlet_node, boundary_node, geometry=edge(outlet_node.geometry, boundary_node.geometry)) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2382565944 + +# Verwijderen Twentekanaal (zit al bij RWS-HWS) +remove_node_ids = [1562, 1568, 1801, 1804, 1810, 1900, 2114, 2118, 2119, 32] + +# remove by edge so we also remove all resistance nodes in between +edge_df = model.edge.df[ + model.edge.df.from_node_id.isin(remove_node_ids) | model.edge.df.to_node_id.isin(remove_node_ids) +][["from_node_id", "to_node_id"]] + +for row in edge_df.itertuples(): + model.remove_edge(from_node_id=row.from_node_id, to_node_id=row.to_node_id, remove_disconnected_nodes=True) + +# add level_boundaries at twentekanaal for later coupling +hws_model = Model.read(cloud.joinpath("Rijkswaterstaat", "modellen", "hws", "hws.toml")) +basin_ids = hws_model.node_table().df[hws_model.node_table().df.name.str.contains("Twentekanaal")].index.to_list() +twentekanaal_poly = hws_model.basin.area.df[hws_model.basin.area.df.node_id.isin(basin_ids)].union_all() + +connect_node_ids = [ + i for i in set(edge_df[["from_node_id", "to_node_id"]].to_numpy().flatten()) if i in model._used_node_ids +] + +for node_id in connect_node_ids: + node = model.get_node(node_id=node_id) + + # update node to Outlet if it's a manning resistance + if node.node_type == "ManningResistance": + model.update_node(node.node_id, "Outlet", data=[outlet_data]) + node = model.get_node(node_id=node_id) + + _, boundary_node_geometry = nearest_points(node.geometry, twentekanaal_poly.boundary) + + boundary_node = model.level_boundary.add(Node(geometry=boundary_node_geometry), tables=[level_data]) + + # draw edge in the correct direction + if model.edge.df.from_node_id.isin([node_id]).any(): # supply + model.edge.add(boundary_node, 
node, geometry=edge(boundary_node.geometry, node.geometry)) + else: + model.edge.add(node, boundary_node, geometry=edge(node.geometry, boundary_node.geometry)) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2385525533 + +# Opruimen situatie rondom gemaal Oude Drostendiep + +# pumps met node_id 639, 608 en 603 op te heffen (1 gemaal ipv 3) +remove_node_ids = [639, 608, 603] + +for node_id in remove_node_ids: + model.remove_node(node_id, remove_edges=True) + +# remove by edge so we also remove all resistance nodes in between +edge_df = model.edge.df[ + model.edge.df.from_node_id.isin(remove_node_ids) | model.edge.df.to_node_id.isin(remove_node_ids) +][["from_node_id", "to_node_id"]] + +for row in edge_df.itertuples(): + model.remove_edge(from_node_id=row.from_node_id, to_node_id=row.to_node_id, remove_disconnected_nodes=True) + +# basin met node_id 1436 te verplaatsen naar locatie basin node_id 2259 +basin_id = 1436 +model.basin.node.df.loc[basin_id, "geometry"] = model.basin[2259].geometry +edge_ids = model.edge.df[ + (model.edge.df.from_node_id == basin_id) | (model.edge.df.to_node_id == basin_id) +].index.to_list() + +model.reset_edge_geometry(edge_ids=edge_ids) + +# basin met node_id 2259 opheffen (klein niets-zeggend bakje) +model.remove_node(2259, remove_edges=True) + +# stuw ST05005 (node_id 361) verbinden met basin met node_id 1436 +model.edge.add(model.tabulated_rating_curve[361], model.basin[basin_id]) +model.edge.add(model.basin[basin_id], model.pump[635]) + +# basin met node_id 2250 verplaatsen naar logische plek bovenstrooms ST05005 en bendenstrooms ST02886 op hydroobjec +basin_id = 2255 +model.basin.node.df.loc[basin_id, ["geometry"]] = hydroobject_gdf.at[6444, "geometry"].interpolate(0.5, normalized=True) + +edge_ids = model.edge.df[ + (model.edge.df.from_node_id == basin_id) | (model.edge.df.to_node_id == basin_id) +].index.to_list() + +model.reset_edge_geometry(edge_ids=edge_ids) + +model.split_basin(split_line_gdf.at[9, 
"geometry"]) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2385409772 + +incorrect_edges_df = network_validator.edge_incorrect_connectivity() +false_basin_ids = [1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1368, 1369, 1370] + +for false_basin_id in false_basin_ids: + basin_geom = ( + incorrect_edges_df[incorrect_edges_df.from_node_id == false_basin_id].iloc[0].geometry.boundary.geoms[0] + ) + basin_node = model.basin.add(Node(geometry=basin_geom), tables=basin_data) + + # fix edge topology + model.edge.df.loc[ + incorrect_edges_df[incorrect_edges_df.from_node_id == false_basin_id].index.to_list(), ["from_node_id"] + ] = basin_node.node_id + + model.edge.df.loc[ + incorrect_edges_df[incorrect_edges_df.to_node_id == false_basin_id].index.to_list(), ["to_node_id"] + ] = basin_node.node_id + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2386671759 + + +# basin 2224 en manning_resistance 898 (DK28491) opheffen +# tabulated_rating_cuves 336 (ST03745) en 238 (ST03744) opheffen +remove_node_ids = [2224, 898, 336, 238] + +for node_id in remove_node_ids: + model.remove_node(node_id, remove_edges=True) + +# pump 667 (GM00088) verbinden met basin 1495 +model.edge.add(model.pump[667], model.basin[1495]) + +# model.basin.area.df = model.basin.area.df[model.basin.area.df.node_id.isin(model.unassigned_basin_area.node_id)] + + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2387026622 + +# opruimen basin at Amsterdamscheveld +model.remove_node(1683, remove_edges=True) + +# verbinden basin node_id 1680 met tabulated_rating_curve node_id 101 en 125 +model.edge.add(model.basin[1680], model.tabulated_rating_curve[101]) +model.edge.add(model.basin[1680], model.tabulated_rating_curve[125]) + +# verbinden pump node_id 622 met basin node_id 1680 +model.edge.add(model.pump[622], model.basin[1680]) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2387056481 + 
+# Fix stieltjeskanaal + +# split basin_area bij manning_resistance node_id 1365 + +line = split_line_gdf.at[1, "geometry"] + +basin_polygon = model.basin.area.df.at[8, "geometry"].geoms[0] +basin_polygons = split_basin(basin_polygon, line) + +model.basin.area.df.loc[8, ["geometry"]] = MultiPolygon([basin_polygons.geoms[0]]) +model.basin.area.df.loc[model.basin.area.df.index.max() + 1, ["geometry"]] = MultiPolygon([basin_polygons.geoms[1]]) + +# hef basin node_id 1901 op +model.remove_node(1901, remove_edges=True) + +# hef pump node_id 574 (GM00246) en node_id 638 (GM00249) op +model.remove_node(574, remove_edges=True) +model.remove_node(638, remove_edges=True) + +# verbind basin node_id 1876 met pump node_ids 626 (GM00248) en 654 (GM00247) +model.edge.add(model.basin[1876], model.pump[626]) +model.edge.add(model.basin[1876], model.pump[654]) +model.edge.add(model.tabulated_rating_curve[113], model.basin[1876]) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2387815013 + +# opruimen Zwinderskanaal + + +# split basin_area bij rode lijn +line = split_line_gdf.at[2, "geometry"] +basin_polygon = model.basin.area.df.at[65, "geometry"].geoms[0] +basin_polygons = split_basin(basin_polygon, line) + +model.basin.area.df.loc[65, ["geometry"]] = MultiPolygon([basin_polygons.geoms[0]]) +model.basin.area.df.loc[model.basin.area.df.index.max() + 1, ["geometry"]] = MultiPolygon([basin_polygons.geoms[1]]) + +# verwijderen basin 2226, 2258, 2242 en manning_resistance 1366 +for node_id in [2226, 2258, 2242, 1366, 1350]: + model.remove_node(node_id, remove_edges=True) + +# verbinden tabulated_rating_curves 327 (ST03499) en 510 (ST03198) met basin 1897 +model.edge.add(model.basin[1897], model.tabulated_rating_curve[327]) +model.edge.add(model.tabulated_rating_curve[510], model.basin[1897]) + +# verbinden basin 1897 met tabulated_rating_curve 279 (ST03138) +model.edge.add(model.basin[1897], model.tabulated_rating_curve[279]) + +# verbinden basin 1897 met 
manning_resistance 1351 en 1352 +model.edge.add(model.basin[1897], model.manning_resistance[1351]) +model.edge.add(model.basin[1897], model.manning_resistance[1352]) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2387888742 + +# Oplossen situatie Van Echtenskanaal/Scholtenskanaal Klazinaveen + +# verplaatsen level_boundary 33 naar splitsing scholtenskanaal/echtenskanaal en omzetten naar basin +outlet_node_geometry = model.level_boundary[33].geometry +model.update_node(33, "Basin", data=basin_data) +model.move_node(node_id=33, geometry=hydroobject_gdf.loc[2679].geometry.boundary.geoms[1]) + +# plaatsen outlet bij oorspronkelijke plaats level_boundary 33 +outlet_node = model.outlet.add(Node(geometry=outlet_node_geometry), tables=[outlet_data]) + +# plaatsen nieuwe level_boundary op scholtenskanaal aan H&A zijde scholtenskanaal van outlet +boundary_node = model.level_boundary.add(Node(geometry=level_boundary_gdf.at[3, "geometry"]), tables=[level_data]) + +# toevoegen edges vanaf nieuwe basin 33 naar nieuwe outlet naar nieuwe boundary +model.edge.add(model.basin[33], outlet_node, geometry=edge(model.basin[33].geometry, outlet_node.geometry)) +model.edge.add(outlet_node, boundary_node) + +# opheffen manning_resistance 1330 bij GM00213 +model.remove_node(1330, remove_edges=True) + +# verbinden nieuwe basin met outlet en oorspronkijke manning_knopen en pompen in oorspronkelijke richting +for edge_id in [2711, 2712, 2713, 2714, 2708]: + model.reverse_edge(edge_id=edge_id) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2388009499 + +# basin met node_id 1873 gaat richting geesburg +model.move_node(node_id=1873, geometry=hydroobject_gdf.loc[6616].geometry.boundary.geoms[1]) +model.basin.area.df.loc[model.basin.area.df.node_id == 1873, ["node_id"]] = pd.NA +# ege 2700, 2701, 2702 worden opgeheven +model.edge.df = model.edge.df[~model.edge.df.index.isin([2700, 2701, 2702])] + +# basin 1873 wordt verbonden met 
manning_resistance 1054 +model.edge.add( + model.basin[1873], + model.manning_resistance[1054], + geometry=edge(model.basin[1873].geometry, model.manning_resistance[1054].geometry), +) + +# manning_resistance 1308 en 1331 worden verbonden met basin 1873 +model.edge.add( + model.manning_resistance[1308], + model.basin[1873], + geometry=edge(model.manning_resistance[1308].geometry, model.basin[1873].geometry), +) +model.edge.add( + model.basin[1873], + model.manning_resistance[1331], + geometry=edge(model.basin[1873].geometry, model.manning_resistance[1331].geometry), +) + +# level_boundary 26 wordt een outlet +model.update_node(26, "Outlet", data=[outlet_data]) + +# nieuwe level_boundary benedenstrooms nieuwe outlet 26 +boundary_node = model.level_boundary.add(Node(geometry=level_boundary_gdf.at[4, "geometry"]), tables=[level_data]) + + +# basin 1873 wordt verbonden met outlet en outlet met level_boundary +model.edge.add( + model.outlet[26], + model.basin[1873], + geometry=edge(model.outlet[26].geometry, model.basin[1873].geometry), +) + +model.edge.add(boundary_node, model.outlet[26]) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2388334544 + +# Kruising Dinkel/Kanaal Almelo Nordhorn corrigeren + +# ege 2700, 2701, 2702 worden opgeheven +model.edge.df = model.edge.df[~model.edge.df.index.isin([2690, 2691, 2692, 2693, 2694, 2695, 2696])] + +# basin / area splitten bij rode lijn in twee vlakken +line = split_line_gdf.at[3, "geometry"] + +total_basin_polygon = model.basin.area.df.at[544, "geometry"] +basin_polygon = [i for i in model.basin.area.df.at[544, "geometry"].geoms if i.intersects(line)][0] +basin_polygons = split_basin(basin_polygon, line) +model.basin.area.df.loc[544, ["geometry"]] = MultiPolygon( + [i for i in model.basin.area.df.at[544, "geometry"].geoms if not i.intersects(line)] + [basin_polygons.geoms[0]] +) +model.basin.area.df.loc[model.basin.area.df.index.max() + 1, ["geometry"]] = 
MultiPolygon([basin_polygons.geoms[1]]) + +# basin op dinkel bovenstrooms kanaal +dinkel_basin_node = model.basin.add( + Node(geometry=hydroobject_gdf.loc[2966].geometry.boundary.geoms[1]), tables=basin_data +) + +# basin in kanaal +kanaal_basin_node = model.basin.add( + Node(geometry=hydroobject_gdf.loc[7720].geometry.boundary.geoms[1]), tables=basin_data +) + +# edges v.a. tabulated_rating_curve 298 (ST01865) en 448 (ST01666) naar dinkel-basin +model.edge.add( + model.tabulated_rating_curve[298], + dinkel_basin_node, + geometry=edge(model.tabulated_rating_curve[298].geometry, dinkel_basin_node.geometry), +) + +model.edge.add( + model.tabulated_rating_curve[448], + dinkel_basin_node, + geometry=edge(model.tabulated_rating_curve[448].geometry, dinkel_basin_node.geometry), +) + +# edge v.a. manning_resistance 915 naar dinkel basin +model.edge.add( + model.manning_resistance[915], + dinkel_basin_node, + geometry=edge(model.manning_resistance[915].geometry, dinkel_basin_node.geometry), +) + +# edges v.a. 
dinkel basin naar tabulate_rating_curves 132 (ST02129) en 474 (ST02130) +model.edge.add( + dinkel_basin_node, + model.tabulated_rating_curve[132], + geometry=edge(dinkel_basin_node.geometry, model.tabulated_rating_curve[132].geometry), +) + +model.edge.add( + dinkel_basin_node, + model.tabulated_rating_curve[474], + geometry=edge(dinkel_basin_node.geometry, model.tabulated_rating_curve[474].geometry), +) + +# nieuwe manning_resistance in nieuwe dinkel-basin bovenstrooms kanaal +manning_node = model.manning_resistance.add( + Node(geometry=hydroobject_gdf.at[7721, "geometry"].interpolate(0.5, normalized=True)), tables=[manning_data] +) + +# nieuwe basin verbinden met nieuwe manning_resistance en nieuw kanaal basin +model.edge.add( + dinkel_basin_node, + manning_node, + geometry=edge(dinkel_basin_node.geometry, manning_node.geometry), +) + +model.edge.add( + manning_node, + kanaal_basin_node, + geometry=edge(manning_node.geometry, kanaal_basin_node.geometry), +) + +# nieuw kanaal-basin vervinden met tabulated_rating_curve 471 (ST01051) +model.edge.add( + kanaal_basin_node, + model.tabulated_rating_curve[471], + geometry=edge(kanaal_basin_node.geometry, model.tabulated_rating_curve[471].geometry), +) + +# nieuw kanaal-basin vervinden met manning_resistance 1346 +model.edge.add( + kanaal_basin_node, + model.manning_resistance[1346], + geometry=edge(kanaal_basin_node.geometry, model.manning_resistance[1346].geometry), +) + +# nieuwe outletlet bij grensduiker kanaal +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[7746, "geometry"].boundary.geoms[0]), tables=[outlet_data] +) + +# nieuwe basin verbinden met outlet verbinden met level_boundary 21 +model.edge.add( + outlet_node, + kanaal_basin_node, + geometry=edge(outlet_node.geometry, kanaal_basin_node.geometry), +) + +model.edge.add( + model.level_boundary[21], + outlet_node, + geometry=edge(model.level_boundary[21].geometry, outlet_node.geometry), +) + +# %% 
https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2389192454 +model.reverse_edge(edge_id=2685) +model.remove_node(node_id=2229, remove_edges=True) +model.edge.add( + model.basin[1778], + model.outlet[1080], + geometry=edge(model.basin[1778].geometry, model.outlet[1080].geometry), +) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2389198178 +model.reverse_edge(edge_id=2715) +model.reverse_edge(edge_id=2720) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2390712613 + +# Oplossen toplogische situatie kanaal Coevorden + +# opheffen basin 2243 en basin 2182 +model.remove_node(2243, remove_edges=True) +model.remove_node(2182, remove_edges=True) +model.remove_node(1351, remove_edges=True) +model.remove_node(1268, remove_edges=True) +model.remove_node(1265, remove_edges=True) + +# onknippen basin bij rode lijn +line = split_line_gdf.at[4, "geometry"] +basin_area_row = model.basin.area.df[model.basin.area.df.contains(line.centroid)].iloc[0] +basin_area_index = basin_area_row.name +basin_polygon = basin_area_row.geometry.geoms[0] +basin_polygons = split_basin(basin_polygon, line) + +model.basin.area.df.loc[basin_area_index, ["geometry"]] = MultiPolygon([basin_polygons.geoms[1]]) +model.basin.area.df.loc[model.basin.area.df.index.max() + 1, ["geometry"]] = MultiPolygon([basin_polygons.geoms[0]]) + +# # verplaatsen basin 1678 naar kruising waterlopen +model.move_node(node_id=1678, geometry=hydroobject_gdf.loc[6594].geometry.boundary.geoms[1]) + +# verwijderen edges 809, 814, 807, 810, 1293, 2772 +model.edge.df = model.edge.df[~model.edge.df.index.isin([809, 814, 807, 810, 887])] + + +# verbinden manning 1270, 1127 en pumps 644, 579 en 649 met basin 1678 +for node_id in [1270, 1127]: + model.edge.add( + model.manning_resistance[node_id], + model.basin[1678], + geometry=edge(model.manning_resistance[node_id].geometry, model.basin[1678].geometry), + ) + +for node_id in [644, 579, 649]: + model.edge.add( + 
model.pump[node_id], + model.basin[1678], + geometry=edge(model.pump[node_id].geometry, model.basin[1678].geometry), + ) + +# verplaatsen manning 1267 naar basin-edge tussen 1678 en 1678 +model.move_node(node_id=1267, geometry=hydroobject_gdf.loc[6609].geometry.boundary.geoms[1]) + +# maak nieuwe manning-node tussen 1678 en 1897 +manning_node = model.manning_resistance.add( + Node(geometry=hydroobject_gdf.loc[6596].geometry.interpolate(0.5, normalized=True)), tables=[manning_data] +) + +# verbinden basin 1897 met manning-node +model.edge.add( + model.basin[1897], + manning_node, + geometry=edge(model.basin[1897].geometry, manning_node.geometry), +) + +model.edge.add( + manning_node, + model.basin[1678], + geometry=edge(manning_node.geometry, model.basin[1678].geometry), +) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2390952469 + +# Schoonebekerdiep v.a. Twist Bült + +# verplaatsen basin 1909 nabij tabulated_rating_curve 383 (ST03607) +model.move_node(1909, geometry=hydroobject_gdf.loc[6865].geometry.boundary.geoms[1]) + +# verwijderen edges 780 en 778 +model.edge.df = model.edge.df[~model.edge.df.index.isin([780, 778])] + +# toevoegen edge tussen tabulated_rating_curve 383 en basin 1909 +model.edge.add( + model.tabulated_rating_curve[383], + model.basin[1909], + geometry=edge(model.tabulated_rating_curve[383].geometry, model.basin[1909].geometry), +) + +# toevoegen edge tussen manning_resistance 851 en basin 1909 +model.edge.add( + model.manning_resistance[851], + model.basin[1909], + geometry=edge(model.manning_resistance[851].geometry, model.basin[1909].geometry), +) + +# opknippen basin 1538 nabij 1909 en verbinden basin 1909 met 1539 via nieuwe manning_knoop +line = split_line_gdf.at[5, "geometry"] +model.split_basin(line=line) +manning_node = model.manning_resistance.add( + Node(geometry=line.intersection(hydroobject_gdf.at[6866, "geometry"])), tables=[manning_data] +) + +model.edge.add(model.basin[1909], manning_node) 
+model.edge.add(manning_node, model.basin[1539], geometry=edge(manning_node.geometry, model.basin[1539].geometry)) + +# verwijderen edge 2716,2718,2718,2719 +model.edge.df = model.edge.df[~model.edge.df.index.isin([2716, 2717, 2718, 2719])] + +# opknippen basin 2181 nabij 1881 en verbinden basin 1881 met 2181 via nieuwe manning_knoop +model.move_node(1881, geometry=hydroobject_gdf.loc[6919].geometry.boundary.geoms[1]) +line = split_line_gdf.at[6, "geometry"] +model.split_basin(line=line) +manning_node = model.manning_resistance.add( + Node(geometry=line.intersection(hydroobject_gdf.at[6879, "geometry"])), tables=[manning_data] +) + +model.edge.add(model.basin[1881], manning_node) +model.edge.add(manning_node, model.basin[2181], geometry=edge(manning_node.geometry, model.basin[2181].geometry)) + +for node_id in [139, 251, 267, 205]: + model.edge.add( + model.tabulated_rating_curve[node_id], + model.basin[1881], + geometry=edge(model.tabulated_rating_curve[node_id].geometry, model.basin[1881].geometry), + ) + +model.move_node(1269, hydroobject_gdf.at[7749, "geometry"].interpolate(0.5, normalized=True)) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391168839 + +# Molengoot Hardenberg + +# opheffen basin 1903 +model.remove_node(1903, remove_edges=True) + +# verbinden basin 1433 met pump 621 +model.edge.add( + model.basin[1433], + model.pump[621], + geometry=edge(model.basin[1433].geometry, model.pump[621].geometry), +) + +# verbinden tabulated_rating_curves 99 en 283 met basin 1433 +for node_id in [99, 283]: + model.edge.add( + model.tabulated_rating_curve[node_id], + model.basin[1433], + geometry=edge(model.tabulated_rating_curve[node_id].geometry, model.basin[1433].geometry), + ) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2390898004 + +model.remove_node(1131, remove_edges=True) +model.remove_node(1757, remove_edges=True) +model.edge.add( + model.basin[1588], + model.tabulated_rating_curve[112], + 
geometry=edge(model.basin[1588].geometry, model.tabulated_rating_curve[112].geometry), +) +model.edge.add( + model.basin[1588], + model.manning_resistance[57], + geometry=edge(model.basin[1588].geometry, model.manning_resistance[57].geometry), +) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391191673 + +# verwijderen basin 1905 +model.remove_node(1905, remove_edges=True) + +# verbinden manning_resistance 995 met basin 2148 +model.edge.add( + model.basin[2148], + model.manning_resistance[995], +) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391460899 + +# Samenvoegen basin-knopen Overijsselse Vecht & Coevorden Vechtkanaal +for basin_id in [1845, 2244, 2006, 1846]: + model.merge_basins(basin_id=basin_id, to_basin_id=2222) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391666745 + +# Opruimen basins Nieuw-Amsterdam + +# opknippen basin 1611 bij rode lijn, area mergen met basin 1879 +basin_polygon = model.basin.area.df.set_index("node_id").at[1611, "geometry"] +left_poly, right_poly = split_basin_multi_polygon(basin_polygon, split_line_gdf.at[8, "geometry"]) +model.basin.area.df.loc[model.basin.area.df.node_id == 1611, ["geometry"]] = right_poly + +left_poly = model.basin.area.df.set_index("node_id").at[1879, "geometry"].union(left_poly) +model.basin.area.df.loc[model.basin.area.df.node_id == 1879, ["geometry"]] = MultiPolygon([left_poly]) + +# merge basins 2186, 2173, 2022, 1611, 2185 in basin 1902 +for basin_id in [2186, 2173, 2022, 1611, 2185]: + model.merge_basins(basin_id=basin_id, to_basin_id=1902) + +# verplaats 1902 iets bovenstrooms +model.move_node(1902, hydroobject_gdf.at[6615, "geometry"].boundary.geoms[1]) + + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391686269 +model.remove_node(2198, remove_edges=True) +model.remove_node(2200, remove_edges=True) +model.edge.add(model.basin[2111], model.pump[671]) +model.edge.add(model.tabulated_rating_curve[542], 
model.basin[2111]) +model.edge.add(model.pump[671], model.basin[2316]) +model.edge.add(model.basin[2316], model.tabulated_rating_curve[542]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391710413 + +model.remove_node(2202, remove_edges=True) +model.edge.add(model.basin[1590], model.pump[657]) +model.edge.add(model.manning_resistance[1058], model.basin[1590]) + + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391672700 + +# Merge basin 2176 in 1605 +model.merge_basins(basin_id=2176, to_basin_id=1605) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391726774 +# Merge basins 2206 in 1518 +model.merge_basins(basin_id=2206, to_basin_id=1518, are_connected=False) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391734144 +# dood takje uit Overijsselse Vecht +model.remove_node(2210, remove_edges=True) +model.remove_node(1294, remove_edges=True) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391740603 + +# Merge basin 2225 met 2304 +model.merge_basins(2225, 2304) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391815016 + +# Wetteringe als laterale inflow +model.merge_basins(2231, 1853) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391750536 + +# Rondom SL00010 opruimen +model.remove_node(2230, remove_edges=True) +model.remove_node(2251, remove_edges=True) +model.edge.add(model.outlet[41], model.level_boundary[15]) +model.edge.add(model.basin[1442], model.pump[664]) +model.edge.add(model.basin[1442], model.pump[665]) + + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391820198 + +# Merge basin 2232 in 1591 +model.merge_basins(basin_id=2232, to_basin_id=1591) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391825301 + +# Basin 2236 naar LevelBoundary +model.update_node(2236, "LevelBoundary", data=[level_data]) + +# %% 
https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391829471 + +# Merge basin 2246 en 1419 +model.merge_basins(basin_id=2246, to_basin_id=1419) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391946915 + +# Opruimen Elsbeek + +# Basin 2256 verplaatsen naar punt +model.move_node(node_id=2256, geometry=hydroobject_gdf.loc[2896].geometry.boundary.geoms[1]) + +# Basin knippen over lijn +model.basin.area.df.loc[model.basin.area.df.node_id == 2256, ["node_id"]] = pd.NA +model.split_basin(split_line_gdf.at[10, "geometry"]) + +# Edges 446, 1516, 443 en 444 verwijderen +model.edge.df = model.edge.df[~model.edge.df.index.isin([446, 1516, 443, 444])] + +# tabulated_rating_curves 202 en 230 verbinden met basin 2256 +model.edge.add( + model.tabulated_rating_curve[202], + model.basin[2256], + geometry=edge(model.tabulated_rating_curve[202].geometry, model.basin[2256].geometry), +) +model.edge.add( + model.tabulated_rating_curve[230], + model.basin[2256], + geometry=edge(model.tabulated_rating_curve[230].geometry, model.basin[2256].geometry), +) + +# resistance 954 verbinden met basin 2256 +model.edge.add( + model.manning_resistance[954], + model.basin[2256], + geometry=edge(model.manning_resistance[954].geometry, model.basin[2256].geometry), +) + +# basin 2256 verbinden met resistance 1106 +model.edge.add( + model.basin[2256], + model.manning_resistance[1106], + geometry=edge(model.basin[2256].geometry, model.manning_resistance[1106].geometry), +) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391984234 + +# Merge basin 2261 in basin 1698 +model.merge_basins(2261, 1698) +model.remove_node(390, remove_edges=True) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2391995841 + +# Merge basin 2260 met basin 1645 +model.merge_basins(2260, 1645) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392010526 +# Merge basin 2220 met basin 1371 +model.merge_basins(2220, 1371, 
are_connected=False) + + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392017041 + +# Kanaal Almelo Nordhorn bij Almelo +model.merge_basins(2219, 1583, are_connected=False) +model.merge_basins(2209, 1583, are_connected=False) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392022887 + +# Merge basin 2203 met 2227 +model.merge_basins(2203, 2227, are_connected=False) +model.remove_node(1219, remove_edges=True) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392026739 + +# Merge basin 2014 met 2144 +model.merge_basins(2014, 2144) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392030268 + +# Merge basin 1696 met 1411 +model.merge_basins(1696, 1411) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392037263 + +# Merge basin 2264 met 1459 +model.merge_basins(2264, 1459) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392043973 + +# Merge basin 2212 en 2310 +model.merge_basins(2212, 2310) +poly = model.basin.area.df.at[59, "geometry"].union(model.basin.area.df.set_index("node_id").at[2310, "geometry"]) +model.basin.area.df.loc[model.basin.area.df.node_id == 2310, ["geometry"]] = MultiPolygon([poly]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392048684 + +# Merge basin 2253 in basin 2228 +model.merge_basins(2253, 2228) + + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392052379 + +# Merge basin 2221 in basin 1634 +model.merge_basins(2221, 1634) + +# %% https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2392076634 + +# Verbinding rondwaterleiding / Lennelwaterleiding herstellen +model.merge_basins(1859, 2235, are_connected=False) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2382572457 + +# Administratie basin node_id in node_table en Basin / Area correct maken +model.fix_unassigned_basin_area() 
+model.fix_unassigned_basin_area(method="closest", distance=100) +model.fix_unassigned_basin_area() + +model.unassigned_basin_area.to_file("unassigned_basins.gpkg") +model.basin.area.df = model.basin.area.df[~model.basin.area.df.node_id.isin(model.unassigned_basin_area.node_id)] + +# %% +# corrigeren knoop-topologie + +# ManningResistance bovenstrooms LevelBoundary naar Outlet +for row in network_validator.edge_incorrect_type_connectivity().itertuples(): + model.update_node(row.from_node_id, "Outlet", data=[outlet_data]) + +# Inlaten van ManningResistance naar Outlet +for row in network_validator.edge_incorrect_type_connectivity( + from_node_type="LevelBoundary", to_node_type="ManningResistance" +).itertuples(): + model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/146#issuecomment-2382578661 + +# opvullen gaten +basin_polygon = model.basin.area.df.union_all() +# holes = [Polygon(interior) for polygon in basin_polygon.buffer(10).buffer(-10).geoms for interior in polygon.interiors] +holes = [Polygon(interior) for interior in basin_polygon.buffer(10).buffer(-10).interiors] +holes_df = gpd.GeoSeries(holes, crs=28992) +holes_df.index = holes_df.index + 1 +holes_df.to_file( + "holes.gpkg", + index=True, + fid="fid", +) +# splitsen Alemelo-Nordhorn / Overijsselskanaal. 
Overijsselskanaal zit in HWS +line = split_line_gdf.at[12, "geometry"] +idx = holes_df[holes_df.intersects(line)].index[0] +poly = split_basin(holes_df[holes_df.intersects(line)].iloc[0], line).geoms[0] +poly = model.basin.area.df.set_index("node_id").at[1583, "geometry"].union(poly) +model.basin.area.df.loc[model.basin.area.df.node_id == 1583, ["geometry"]] = MultiPolygon([poly]) + +# Split Overijsselskanaal bij Zwolsekanaal +line = split_line_gdf.at[13, "geometry"] +poly1, poly2 = split_basin(holes_df[holes_df.intersects(line)].iloc[0], line).geoms + +poly1 = model.basin.area.df.set_index("node_id").at[2116, "geometry"].union(poly1) +poly1 = MultiPolygon([i for i in poly1.geoms if i.geom_type == "Polygon"]) +model.basin.area.df.loc[model.basin.area.df.node_id == 2116, ["geometry"]] = poly1 + +poly2 = model.basin.area.df.set_index("node_id").at[2115, "geometry"].union(poly2) +poly2 = MultiPolygon([i for i in poly2.geoms if i.geom_type == "Polygon"] + [holes_df.loc[38], holes_df.loc[29]]) +model.basin.area.df.loc[model.basin.area.df.node_id == 2115, ["geometry"]] = poly2 + +# de rest gaan we automatisch vullen +holes_df = holes_df[~holes_df.index.isin([10, 22, 29, 32, 38, 39, 41])] + +holes_df.to_file( + "holes.gpkg", + index=True, + fid="fid", +) + +drainage_areas_df = gpd.read_file( + cloud.joinpath("Vechtstromen", "verwerkt", "4_ribasim", "areas.gpkg"), layer="drainage_areas" +) + +drainage_areas_df = drainage_areas_df[drainage_areas_df.buffer(-10).intersects(basin_polygon)] + +for idx, geometry in enumerate(holes_df): + # select drainage-area + drainage_area_select = drainage_areas_df[drainage_areas_df.contains(geometry.buffer(-10))] + if not drainage_area_select.empty: + if not len(drainage_area_select) == 1: + raise ValueError("hole contained by multiple drainage areas, can't fix that yet") + + drainage_area = drainage_area_select.iloc[0].geometry + + # find basin_id to merge to + selected_basins_df = model.basin.area.df[ + 
model.basin.area.df.node_id.isin(model.basin.node.df[model.basin.node.df.within(drainage_area)].index) + ].set_index("node_id") + if selected_basins_df.empty: + selected_basins_df = model.basin.area.df[ + model.basin.area.df.buffer(-10).intersects(drainage_area) + ].set_index("node_id") + + assigned_basin_id = selected_basins_df.intersection(geometry.buffer(10)).area.idxmax() + + # clip and merge geometry + geometry = geometry.buffer(10).difference(basin_polygon) + geometry = ( + model.basin.area.df.set_index("node_id") + .at[assigned_basin_id, "geometry"] + .union(geometry) + .buffer(0.1) + .buffer(-0.1) + ) + + if isinstance(geometry, Polygon): + geometry = MultiPolygon([geometry]) + model.basin.area.df.loc[model.basin.area.df.node_id == assigned_basin_id, "geometry"] = geometry + +# buffer out small slivers +model.basin.area.df.loc[:, ["geometry"]] = ( + model.basin.area.df.buffer(0.1) + .buffer(-0.1) + .apply(lambda x: x if x.geom_type == "MultiPolygon" else MultiPolygon([x])) +) +# %% +# basin-profielen updaten + +df = pd.DataFrame( + { + "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), + "level": [0.0, 1.0] * len(model.basin.node.df), + "area": [0.01, 1000.0] * len(model.basin.node.df), + } +) +df.index.name = "fid" +model.basin.profile.df = df + +df = model.basin.profile.df.groupby("node_id")[["level"]].max().reset_index() +df.index.name = "fid" +model.basin.state.df = df + +# %% +# tabulated_rating_curves updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.tabulated_rating_curve.node.df.index.to_numpy(), 2), + "level": [0.0, 5] * len(model.tabulated_rating_curve.node.df), + "flow_rate": [0, 0.1] * len(model.tabulated_rating_curve.node.df), + } +) +df.index.name = "fid" +model.tabulated_rating_curve.static.df = df + + +# %% + +# level_boundaries updaten +df = pd.DataFrame( + { + "node_id": model.level_boundary.node.df.index.to_list(), + "level": [0.0] * len(model.level_boundary.node.df), + } +) +df.index.name = "fid" 
+model.level_boundary.static.df = df + +# %% +# manning_resistance updaten +length = len(model.manning_resistance.node.df) +df = pd.DataFrame( + { + "node_id": model.manning_resistance.node.df.index.to_list(), + "length": [100.0] * length, + "manning_n": [100.0] * length, + "profile_width": [100.0] * length, + "profile_slope": [100.0] * length, + } +) +df.index.name = "fid" +model.manning_resistance.static.df = df + +# %% write model + +model.basin.area.df.loc[:, ["meta_area"]] = model.basin.area.df.area +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file("missing_areas.gpkg") + + +# model.use_validation = True +model.write(ribasim_toml) + +# %% diff --git a/notebooks/vechtstromen/99_upload_model.py b/notebooks/vechtstromen/99_upload_model.py new file mode 100644 index 0000000..31aa38b --- /dev/null +++ b/notebooks/vechtstromen/99_upload_model.py @@ -0,0 +1,6 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +cloud.upload_model("Vechtstromen", "Vechtstromen", include_results=False, include_plots=False) diff --git a/src/ribasim_nl/ribasim_nl/geometry.py b/src/ribasim_nl/ribasim_nl/geometry.py index 379333d..1457da7 100644 --- a/src/ribasim_nl/ribasim_nl/geometry.py +++ b/src/ribasim_nl/ribasim_nl/geometry.py @@ -104,6 +104,37 @@ def split_basin(basin_polygon: Polygon, line: LineString) -> MultiPolygon: return MultiPolygon(sort_basins(keep_polys)) +def split_basin_multi_polygon(basin_polygon: MultiPolygon, line: LineString) -> tuple[MultiPolygon, MultiPolygon]: + """Split a MultiPolygon into two given a LineString.""" + line_centre = line.interpolate(0.5, normalized=True) + + # get the polygon to cut + basin_geoms = list(basin_polygon.geoms) + if len(basin_geoms) == 0: + cut_idx = 0 + else: + try: + cut_idx = next(idx for idx, i in enumerate(basin_geoms) if i.contains(line_centre)) + except StopIteration: + cut_idx = next(idx for idx, i in enumerate(basin_geoms) if line.intersects(i)) + + # split it + 
right_basin_poly, left_basin_poly = split_basin(basin_geoms[cut_idx], line).geoms + + # concat left-over polygons to the right-side + right_basin_poly = [right_basin_poly] + left_basin_poly = [left_basin_poly] + + for idx, geom in enumerate(basin_geoms): + if idx != cut_idx: + if geom.distance(right_basin_poly[0]) < geom.distance(left_basin_poly[0]): + right_basin_poly += [geom] + else: + left_basin_poly += [geom] + + return MultiPolygon(right_basin_poly), MultiPolygon(left_basin_poly) + + def drop_z(geometry: LineString | MultiPolygon | Point | Polygon) -> Point | Polygon | MultiPolygon: """Drop the z-coordinate of a geometry if it has. @@ -145,3 +176,21 @@ def drop_z(geometry: LineString | MultiPolygon | Point | Polygon) -> Point | Pol ) return geometry + + +def edge(point_from: Point, point_to: Point) -> LineString: + """Create a LineString geometry between two Point geometries, dropping z-coordinate if any + + Args: + point_from (Point): _description_ + point_to (Point): _description_ + + Returns + ------- + LineString: LineString without z-coordinate + """ + if point_from.has_z: + point_from = Point(point_from.x, point_from.y) + if point_to.has_z: + point_to = Point(point_to.x, point_to.y) + return LineString((point_from, point_to)) diff --git a/src/ribasim_nl/ribasim_nl/model.py b/src/ribasim_nl/ribasim_nl/model.py index f8a4b51..0a9a8f6 100644 --- a/src/ribasim_nl/ribasim_nl/model.py +++ b/src/ribasim_nl/ribasim_nl/model.py @@ -1,14 +1,16 @@ from pathlib import Path from typing import Literal +import networkx as nx import pandas as pd from pydantic import BaseModel from ribasim import Model, Node from ribasim.geometry.edge import NodeData -from shapely.geometry import LineString, Point +from shapely.geometry import LineString, MultiPolygon, Point, Polygon from shapely.geometry.base import BaseGeometry from ribasim_nl.case_conversions import pascal_to_snake_case +from ribasim_nl.geometry import split_basin def read_arrow(filepath: Path) -> pd.DataFrame: @@ 
-46,6 +48,10 @@ def basin_results(self): self._basin_results = BasinResults(filepath=filepath) return self._basin_results + @property + def graph(self): + return nx.from_pandas_edgelist(self.edge.df[["from_node_id", "to_node_id"]], "from_node_id", "to_node_id") + @property def next_node_id(self): return self.node_table().df.index.max() + 1 @@ -260,15 +266,18 @@ def add_control_node( for _to_node_id in to_node_id: self.edge.add(table[node_id], self.get_node(_to_node_id)) - def reverse_edge(self, from_node_id: int, to_node_id: int): + def reverse_edge(self, from_node_id: int | None = None, to_node_id: int | None = None, edge_id: int | None = None): """Reverse an edge""" if self.edge.df is not None: - # get original edge-data - df = self.edge.df.copy() - df.loc[:, ["edge_id"]] = df.index - df = df.set_index(["from_node_id", "to_node_id"], drop=False) - edge_data = dict(df.loc[from_node_id, to_node_id]) - edge_id = edge_data["edge_id"] + if edge_id is None: + # get original edge-data + df = self.edge.df.copy() + df.loc[:, ["edge_id"]] = df.index + df = df.set_index(["from_node_id", "to_node_id"], drop=False) + edge_data = dict(df.loc[from_node_id, to_node_id]) + edge_id = edge_data["edge_id"] + else: + edge_data = dict(self.edge.df.loc[edge_id]) # revert node ids self.edge.df.loc[edge_id, ["from_node_id"]] = edge_data["to_node_id"] @@ -294,6 +303,25 @@ def remove_edge(self, from_node_id: int, to_node_id: int, remove_disconnected_no if node_id not in self.edge.df[["from_node_id", "to_node_id"]].to_numpy().ravel(): self.remove_node(node_id) + def remove_edges(self, edge_ids: list[int]): + if self.edge.df is not None: + self.edge.df = self.edge.df[~self.edge.df.index.isin(edge_ids)] + + def move_node(self, node_id: int, geometry: Point): + node_type = self.node_table().df.at[node_id, "node_type"] + + # read existing table + table = getattr(self, pascal_to_snake_case(node_type)) + + # update geometry + table.node.df.loc[node_id, ["geometry"]] = geometry + + # reset all 
edges + edge_ids = self.edge.df[ + (self.edge.df.from_node_id == node_id) | (self.edge.df.to_node_id == node_id) + ].index.to_list() + self.reset_edge_geometry(edge_ids=edge_ids) + def find_closest_basin(self, geometry: BaseGeometry, max_distance: float | None) -> NodeData: """Find the closest basin_node.""" # only works when basin area are defined @@ -361,7 +389,9 @@ def reset_edge_geometry(self, edge_ids: list | None = None): df = self.edge.df for row in df.itertuples(): - geometry = LineString([node_df.at[row.from_node_id, "geometry"], node_df.at[row.to_node_id, "geometry"]]) + from_point = Point(node_df.at[row.from_node_id, "geometry"].x, node_df.at[row.from_node_id, "geometry"].y) + to_point = Point(node_df.at[row.to_node_id, "geometry"].x, node_df.at[row.to_node_id, "geometry"].y) + geometry = LineString([from_point, to_point]) self.edge.df.loc[row.Index, ["geometry"]] = geometry @property @@ -373,3 +403,116 @@ def edge_from_node_type(self): def edge_to_node_type(self): node_df = self.node_table().df return self.edge.df.to_node_id.apply(lambda x: node_df.at[x, "node_type"] if x in node_df.index else None) + + def split_basin(self, line: LineString): + if self.basin.area.df is None: + raise ValueError("provide basin / area table first") + + line_centre = line.interpolate(0.5, normalized=True) + basin_area_df = self.basin.area.df[self.basin.area.df.contains(line_centre)] + + if len(basin_area_df) != 1: + raise ValueError("Overlapping basin-areas at cut_line") + + # get all we need + basin_fid = int(basin_area_df.iloc[0].name) + basin_geometry = basin_area_df.iloc[0].geometry + self.basin.area.df = self.basin.area.df[self.basin.area.df.index != basin_fid] + + # get the polygon to cut + basin_geoms = list(basin_geometry.geoms) + cut_idx = next(idx for idx, i in enumerate(basin_geoms) if i.contains(line_centre)) + + # split it + right_basin_poly, left_basin_poly = split_basin(basin_geoms[cut_idx], line).geoms + + # concat left-over polygons to the right-side + 
right_basin_poly = [right_basin_poly] + left_basin_poly = [left_basin_poly] + + for idx, geom in enumerate(basin_geoms): + if idx != cut_idx: + if geom.distance(right_basin_poly[0]) < geom.distance(left_basin_poly[0]): + right_basin_poly += [geom] + else: + left_basin_poly += [geom] + + right_basin_poly = MultiPolygon(right_basin_poly) + left_basin_poly = MultiPolygon(left_basin_poly) + + for poly in [right_basin_poly, left_basin_poly]: + if self.basin.node.df.geometry.within(poly).any(): + node_ids = self.basin.node.df[self.basin.node.df.geometry.within(poly)].index.to_list() + if len(node_ids) == 1: + if node_ids[0] in self.basin.area.df.node_id.to_numpy(): + self.basin.area.df.loc[self.basin.area.df.node_id.isin(node_ids), ["geometry"]] = poly + else: + self.basin.area.df.loc[self.basin.area.df.index.max() + 1] = { + "node_id": node_ids[0], + "geometry": poly, + } + else: + self.basin.area.df.loc[self.basin.area.df.index.max() + 1, ["geometry"]] = poly + else: + self.basin.area.df.loc[self.basin.area.df.index.max() + 1, ["geometry"]] = poly + + if self.basin.area.df.crs is None: + self.basin.area.df.crs = self.crs + + def redirect_edge(self, edge_id: int, from_node_id: int | None = None, to_node_id: int | None = None): + if self.edge.df is not None: + if from_node_id is not None: + self.edge.df.loc[edge_id, ["from_node_id"]] = from_node_id + if to_node_id is not None: + self.edge.df.loc[edge_id, ["to_node_id"]] = to_node_id + + self.reset_edge_geometry(edge_ids=[edge_id]) + + def merge_basins(self, basin_id: int, to_basin_id: int, are_connected=True): + for node_id in (basin_id, to_basin_id): + if node_id not in self.basin.node.df.index: + raise ValueError(f"{node_id} is not a basin") + + if are_connected: + paths = [i for i in nx.all_shortest_paths(self.graph, basin_id, to_basin_id) if len(i) == 3] + + if len(paths) == 0: + raise ValueError(f"basin {basin_id} not a direct neighbor of basin {to_basin_id}") + + # remove flow-node and connected edges + for path in 
paths: + self.remove_node(path[1], remove_edges=True) + + # get a complete edge-list to modify + edge_ids = self.edge.df[self.edge.df.from_node_id == basin_id].index.to_list() + edge_ids += self.edge.df[self.edge.df.to_node_id == basin_id].index.to_list() + + # correct edge from and to attributes + self.edge.df.loc[self.edge.df.from_node_id == basin_id, "from_node_id"] = to_basin_id + self.edge.df.loc[self.edge.df.to_node_id == basin_id, "to_node_id"] = to_basin_id + + # reset edge geometries + self.reset_edge_geometry(edge_ids=edge_ids) + + # merge area if basin has any assigned to it + if basin_id in self.basin.area.df.node_id.to_numpy(): + poly = self.basin.area.df.set_index("node_id").at[basin_id, "geometry"] + + # if to_basin_id has area we union both areas + if to_basin_id in self.basin.area.df.node_id.to_numpy(): + poly = poly.union(self.basin.area.df.set_index("node_id").at[to_basin_id, "geometry"]) + if isinstance(poly, Polygon): + poly = MultiPolygon([poly]) + self.basin.area.df.loc[self.basin.area.df.node_id == to_basin_id, ["geometry"]] = poly + + # else we add a record to basin + else: + if isinstance(poly, Polygon): + poly = MultiPolygon([poly]) + self.basin.area.df.loc[self.basin.area.df.index.max() + 1] = {"node_id": to_basin_id, "geometry": poly} + + # finally we remove the basin + self.remove_node(basin_id) + + if self.basin.area.df.crs is None: + self.basin.area.df.crs = self.crs diff --git a/src/ribasim_nl/ribasim_nl/network_validator.py b/src/ribasim_nl/ribasim_nl/network_validator.py index 72472c5..13cb07d 100644 --- a/src/ribasim_nl/ribasim_nl/network_validator.py +++ b/src/ribasim_nl/ribasim_nl/network_validator.py @@ -1,5 +1,6 @@ from dataclasses import dataclass +import geopandas as gpd from ribasim import Model @@ -68,6 +69,31 @@ def node_internal_basin(self): mask = self.node_df.apply(lambda row: check_internal_basin(row, self.edge_df), axis=1) return self.node_df[mask] + def node_invalid_connectivity(self, tolerance: float = 1.0) -> 
gpd.GeoDataFrame: + """Check if node_from and node_to are correct on edge""" + node_df = self.node_df + invalid_edges_df = self.edge_incorrect_connectivity() + invalid_nodes = [] + for row in invalid_edges_df.itertuples(): + geoms = row.geometry.boundary.geoms + + for idx, attr in ((0, "from_node_id"), (1, "to_node_id")): + node_id = getattr(row, attr) + point = geoms[idx] + if node_id in node_df.index: + if point.distance(node_df.at[node_id, "geometry"]) > tolerance: + invalid_nodes += [{"node_id": node_id, "geometry": point}] + else: + invalid_nodes += [{"node_id": node_id, "geometry": point}] + + if invalid_nodes: + df = gpd.GeoDataFrame(invalid_nodes, crs=node_df.crs) + df.drop_duplicates(inplace=True) + else: + df = gpd.GeoDataFrame({"node_id": []}, geometry=gpd.GeoSeries(), crs=node_df.crs) + + return df + def edge_duplicated(self): """Check if the `from_node_id` and `to_node_id` in the edge-table is duplicated""" return self.edge_df[self.edge_df.duplicated(subset=["from_node_id", "to_node_id"], keep=False)] From 05dfcb4f1aa554d9480c2e2a93107e165d01ad5b Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Thu, 17 Oct 2024 11:06:36 +0200 Subject: [PATCH 11/23] Make AGV preprocessing reproducible (#165) Instead of local paths, this uses the CloudStorage class from the `ribasim_nl` package to load AGV data, such that this code can also run on other machines. I uncommented the first processing part since that was needed for the second part to run. This also adds an upload of the processed data to the cloud to `AmstelGooienVecht/verwerkt/preprocessed.gpkg`, so the result of this step is directly accessible as well. It should not change any results, but it's good to go over this with @rbruijnshkv tomorrow.
--- .../preprocess_data/AmstelGooienVecht.ipynb | 186 +++++++++--------- 1 file changed, 93 insertions(+), 93 deletions(-) diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb index ef0bc05..8c71596 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb @@ -7,12 +7,12 @@ "metadata": {}, "outputs": [], "source": [ - "# import packages and functions\n", - "import os\n", - "\n", "import geopandas as gpd\n", "import numpy as np\n", - "import pandas as pd" + "import pandas as pd\n", + "from general_functions import show_layers_and_columns, store_data\n", + "from ribasim_nl import CloudStorage\n", + "from shapely import wkt" ] }, { @@ -22,19 +22,19 @@ "metadata": {}, "outputs": [], "source": [ - "from general_functions import *\n", - "\n", "%load_ext autoreload\n", "%autoreload 2\n", "pd.set_option(\"display.max_columns\", None)" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "id": "2", "metadata": {}, + "outputs": [], "source": [ - "# Amstel, Gooi en Vecht" + "cloud = CloudStorage()" ] }, { @@ -44,11 +44,8 @@ "metadata": {}, "outputs": [], "source": [ - "# define relative paths\n", "waterschap = \"AVG\"\n", - "path_AVG = \"..\\..\\Data_preprocessed\\Waterschappen\\AmstelGooienVecht\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/AmstelGooienVecht\"\n", - "DM_path = \"..\\..\\Data_overig\\DM_Netwerk\\DM_Netwerk.shp\"" + "waterschap_long = \"AmstelGooienVecht\"" ] }, { @@ -58,58 +55,95 @@ "metadata": {}, "outputs": [], "source": [ - "# #AVG has delivered all data in CSV format. 
Load it in manually with some data mutations\n", - "# AVG = {}\n", - "# variables = ['stuw', 'gemaal', 'afsluitmiddel', 'duikersifonhevel', 'hydroobject']#, 'peilgebiedpraktijk', 'peilafwijkinggebied']\n", - "# for variable in variables:\n", - "# path_variable = os.path.join(path_AVG, variable + '.csv')\n", - "# df_var = pd.read_csv(path_variable, delimiter=';')\n", - "# geom_col = df_var.keys()[-1] #retrieve the column name\n", + "cloud.download_aangeleverd(waterschap_long)" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "# Amstel, Gooi en Vecht" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "# define paths\n", + "aangeleverd_dir = cloud.joinpath(waterschap_long, \"aangeleverd\")\n", + "verwerkt_dir = cloud.joinpath(waterschap_long, \"verwerkt\")\n", "\n", - "# if not 'geometrie' in geom_col:\n", - "# raise ValueError('No \"geometry\" string found in the last column of the dataframe. 
Check for existence')\n", + "output_gpkg_path = verwerkt_dir / \"preprocessed\"\n", "\n", - "# df_var['geometry'] = df_var[geom_col].apply(lambda x: wkt.loads(x.split(';')[-1]))\n", - "# AVG[variable] = df_var\n", + "# pyogrio needs the exclamation mark to read the file from the zip\n", + "dump_path = (\n", + " aangeleverd_dir / \"aanlevering_6maart24/data dump 6 maart LHM AGV.zip!/data dump 6 maart LHM AGV/\"\n", + ").as_posix()\n", "\n", - "# #there is one last gpkg which contains the streefpeilen (and peilgebieden)\n", - "# AVG['peilgebied'] = gpd.read_file(os.path.join(path_AVG, 'vigerende_peilgebieden.gpkg'))" + "verwerkt_dir.mkdir(parents=True, exist_ok=True)" ] }, { "cell_type": "code", "execution_count": null, - "id": "5", + "id": "7", "metadata": {}, "outputs": [], "source": [ - "# AVG['peilgebied']['streefpeil'] = np.nan\n", - "# AVG['peilgebied']['streefpeil'] = AVG['peilgebied']['streefpeil'].fillna(value=AVG['peilgebied']['GPGZMRPL'])\n", - "# AVG['peilgebied']['streefpeil'] = AVG['peilgebied']['streefpeil'].fillna(value=AVG['peilgebied']['IWS_GPGVASTP'])\n", - "# AVG['peilgebied']['streefpeil'] = AVG['peilgebied']['streefpeil'].fillna(value=AVG['peilgebied']['IWS_GPGONDP'])\n", + "# AVG has delivered all data in CSV format. Load it in manually with some data mutations\n", + "AVG = {}\n", + "variables = [\n", + " \"stuw\",\n", + " \"gemaal\",\n", + " \"afsluitmiddel\",\n", + " \"duikersifonhevel\",\n", + " \"hydroobject\",\n", + "] # , 'peilgebiedpraktijk', 'peilafwijkinggebied']\n", + "for variable in variables:\n", + " path_variable = aangeleverd_dir / \"Eerste_levering\" / (variable + \".csv\")\n", + " df_var = pd.read_csv(path_variable, delimiter=\";\")\n", + " geom_col = df_var.keys()[-1] # retrieve the column name\n", "\n", + " if \"geometrie\" not in geom_col:\n", + " raise ValueError('No \"geometry\" string found in the last column of the dataframe. 
Check for existence')\n", "\n", - "# print('Number of missing streefpeilen = ', len(AVG['peilgebied']['streefpeil'].loc[AVG['peilgebied']['streefpeil'].isna()]))\n", + " df_var[\"geometry\"] = df_var[geom_col].apply(lambda x: wkt.loads(x.split(\";\")[-1]))\n", + " AVG[variable] = df_var\n", "\n", - "# fig, ax = plt.subplots()\n", - "# AVG['peilgebied'].geometry.plot(ax=ax, color='cornflowerblue')\n", - "# AVG['peilgebied'].loc[AVG['peilgebied']['streefpeil'].isna()].geometry.plot(ax=ax, color='red')\n", - "# ax.legend()" + "# there is one last gpkg which contains the streefpeilen (and peilgebieden)\n", + "AVG[\"peilgebied\"] = gpd.read_file(aangeleverd_dir / \"Na_levering\" / \"vigerende_peilgebieden.gpkg\")" ] }, { "cell_type": "code", "execution_count": null, - "id": "6", + "id": "8", "metadata": {}, "outputs": [], "source": [ - "AVG = {}" + "AVG[\"peilgebied\"][\"streefpeil\"] = np.nan\n", + "AVG[\"peilgebied\"][\"streefpeil\"] = AVG[\"peilgebied\"][\"streefpeil\"].fillna(value=AVG[\"peilgebied\"][\"GPGZMRPL\"])\n", + "AVG[\"peilgebied\"][\"streefpeil\"] = AVG[\"peilgebied\"][\"streefpeil\"].fillna(value=AVG[\"peilgebied\"][\"IWS_GPGVASTP\"])\n", + "AVG[\"peilgebied\"][\"streefpeil\"] = AVG[\"peilgebied\"][\"streefpeil\"].fillna(value=AVG[\"peilgebied\"][\"IWS_GPGONDP\"])\n", + "\n", + "print(\n", + " \"Number of missing streefpeilen = \",\n", + " len(AVG[\"peilgebied\"][\"streefpeil\"].loc[AVG[\"peilgebied\"][\"streefpeil\"].isna()]),\n", + ")\n", + "\n", + "# fig, ax = plt.subplots()\n", + "# AVG['peilgebied'].geometry.plot(ax=ax, color='cornflowerblue')\n", + "# AVG['peilgebied'].loc[AVG['peilgebied']['streefpeil'].isna()].geometry.plot(ax=ax, color='red')\n", + "# ax.legend()" ] }, { "cell_type": "markdown", - "id": "7", + "id": "9", "metadata": {}, "source": [ "# Nalevering" @@ -118,37 +152,26 @@ { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "10", "metadata": {}, "outputs": [], "source": [ "# overwrite previous data\n", - 
"AVG[\"stuw\"] = gpd.read_file(\n", - " r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\AmstelGooienVecht\\data dump 6 maart LHM AGV\\Stuw.shp\"\n", - ")\n", + "AVG[\"stuw\"] = gpd.read_file(dump_path + \"/Stuw.shp\")\n", "AVG[\"stuw\"] = AVG[\"stuw\"].loc[AVG[\"stuw\"].LHM == \"LHM\"]\n", "\n", - "AVG[\"gemaal\"] = gpd.read_file(\n", - " r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\AmstelGooienVecht\\data dump 6 maart LHM AGV\\Gemaal.shp\"\n", - ")\n", + "AVG[\"gemaal\"] = gpd.read_file(dump_path + \"/Gemaal.shp\")\n", "AVG[\"gemaal\"] = AVG[\"gemaal\"].loc[AVG[\"gemaal\"].LHM == \"LHM\"]\n", "\n", - "AVG[\"duikersifonhevel\"] = gpd.read_file(\n", - " r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\AmstelGooienVecht\\data dump 6 maart LHM AGV\\Duikersifonhevel.shp\"\n", - ")\n", - "AVG[\"hydroobject\"] = gpd.read_file(\n", - " r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\AmstelGooienVecht\\data dump 6 maart LHM AGV\\LHM_hydrovakken.shp\"\n", - ")\n", - "\n", - "AVG[\"peilgebied\"] = gpd.read_file(\n", - " r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\AmstelGooienVecht\\data dump 6 maart LHM AGV\\LHM_gebieden.shp\"\n", - ")" + "AVG[\"duikersifonhevel\"] = gpd.read_file(dump_path + \"/DuikerSifonHevel.shp\")\n", + "AVG[\"hydroobject\"] = gpd.read_file(dump_path + \"/LHM_hydrovakken.shp\")\n", + "AVG[\"peilgebied\"] = gpd.read_file(dump_path + \"/LHM_gebieden.shp\")" ] }, { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -160,7 +183,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -170,7 +193,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -192,9 +215,7 @@ 
"AVG[\"gemaal\"].loc[AVG[\"gemaal\"].functiegemaal.str.contains(\"anvoergemaal|pmaling|an-|p-|pvoer\"), \"func_aanvoer\"] = True\n", "AVG[\"gemaal\"].loc[AVG[\"gemaal\"].functiegemaal.str.contains(\"irculatie\"), \"func_circulatie\"] = True\n", "AVG[\"gemaal\"].loc[\n", - " (AVG[\"gemaal\"].func_afvoer is False)\n", - " & (AVG[\"gemaal\"].func_aanvoer is False)\n", - " & (AVG[\"gemaal\"].func_circulatie is False),\n", + " ~AVG[\"gemaal\"].func_afvoer & ~AVG[\"gemaal\"].func_aanvoer & ~AVG[\"gemaal\"].func_circulatie,\n", " \"func_afvoer\",\n", "] = True # set to afvoergemaal is there the function is unknown" ] @@ -202,7 +223,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -219,7 +240,7 @@ "AVG[\"gemaal\"] = gpd.GeoDataFrame(AVG[\"gemaal\"]).to_crs(\"epsg:28992\")\n", "\n", "# afsluitmiddel\n", - "AVG[\"afsluitmiddel\"] = AVG[\"afsluitmiddel\"][[\"code\", \"geometry\"]]\n", + "AVG[\"afsluitmiddel\"] = AVG[\"afsluitmiddel\"][[\"code\", \"geometry\"]].copy()\n", "AVG[\"afsluitmiddel\"].loc[:, \"nen3610id\"] = \"dummy_nen3610id_afsluitmiddel_\" + AVG[\"afsluitmiddel\"].index.astype(str)\n", "AVG[\"afsluitmiddel\"][\"globalid\"] = \"dummy_globalid_afsluitmiddel_\" + AVG[\"afsluitmiddel\"].index.astype(str)\n", "AVG[\"afsluitmiddel\"] = gpd.GeoDataFrame(AVG[\"afsluitmiddel\"]).set_crs(\"epsg:28992\")\n", @@ -252,7 +273,6 @@ "AVG[\"peilgebied\"][\"nen3610id\"] = \"dummy_nen3610id_peilgebied_\" + AVG[\"peilgebied\"].index.astype(str)\n", "AVG[\"peilgebied\"][\"globalid\"] = \"dummy_globalid_peilgebied_\" + AVG[\"peilgebied\"].index.astype(str)\n", "\n", - "\n", "AVG[\"peilgebied\"] = AVG[\"peilgebied\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", "AVG[\"peilgebied\"] = gpd.GeoDataFrame(AVG[\"peilgebied\"]).to_crs(\"epsg:28992\")" ] @@ -260,7 +280,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -270,17 
+290,9 @@ "AVG[\"hydroobject\"] = gpd.GeoDataFrame(AVG[\"hydroobject\"]).set_crs(\"epsg:28992\")" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "markdown", - "id": "15", + "id": "16", "metadata": {}, "source": [ "# Control, store" @@ -289,7 +301,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -299,32 +311,20 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "18", "metadata": {}, "outputs": [], "source": [ - "# Check if the directory exists\n", - "if not os.path.exists(output_gpkg_path):\n", - " # If it doesn't exist, create it\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=AVG, output_gpkg_path=output_gpkg_path + \"/AGV\")" + "store_data(waterschap=AVG, output_gpkg_path=str(output_gpkg_path))\n", + "cloud.upload_verwerkt(waterschap_long)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Routing", + "display_name": "default", "language": "python", - "name": "routing" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -336,7 +336,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.13" + "version": "3.12.7" } }, "nbformat": 4, From e0f021f8005f10cd276e42eabaa3c6f0ab894927 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Thu, 17 Oct 2024 13:33:40 +0200 Subject: [PATCH 12/23] Afbouwen vv wrij (#166) - debug-script voor vv -> rekenend model - start met wrij (nog niet af) - alle gebieden: reviewscript voert modelfouten.gpkg uit --- notebooks/aa_en_maas/00_get_model.py | 17 ++ .../aa_en_maas/00_review_model_network.py | 58 +++++ notebooks/aaenmaas/get_model.py | 9 - notebooks/brabantse_delta/00_get_model.py | 19 ++ .../00_review_model_network.py | 58 
+++++ notebooks/hunze_en_aas/00_get_model.py | 19 ++ .../hunze_en_aas/00_review_model_network.py | 58 +++++ notebooks/limburg/00_get_model.py | 19 ++ notebooks/limburg/00_review_model_network.py | 58 +++++ notebooks/modelkwaliteit.py | 44 ++++ notebooks/noorderzijlvest/00_get_model.py | 17 ++ .../00_review_model_network.py | 54 +++++ notebooks/rijn_en_ijssel/00_get_model.py | 17 ++ .../rijn_en_ijssel/00_review_model_network.py | 58 +++++ .../rijn_en_ijssel/01_fix_model_network.py | 211 ++++++++++++++++++ notebooks/stichtse_rijnlanden/00_get_model.py | 17 ++ .../00_review_model_network.py | 54 +++++ notebooks/vallei_en_veluwe/00_get_model.py | 17 ++ .../00_review_model_network.py | 54 +++++ .../vallei_en_veluwe/01_fix_model_network.py | 211 ++++++++++++++++++ .../vechtstromen/00_review_model_network.py | 58 +++++ src/ribasim_nl/ribasim_nl/geometry.py | 3 +- src/ribasim_nl/ribasim_nl/model.py | 104 +++++++-- .../ribasim_nl/network_validator.py | 2 +- 24 files changed, 1208 insertions(+), 28 deletions(-) create mode 100644 notebooks/aa_en_maas/00_get_model.py create mode 100644 notebooks/aa_en_maas/00_review_model_network.py delete mode 100644 notebooks/aaenmaas/get_model.py create mode 100644 notebooks/brabantse_delta/00_get_model.py create mode 100644 notebooks/brabantse_delta/00_review_model_network.py create mode 100644 notebooks/hunze_en_aas/00_get_model.py create mode 100644 notebooks/hunze_en_aas/00_review_model_network.py create mode 100644 notebooks/limburg/00_get_model.py create mode 100644 notebooks/limburg/00_review_model_network.py create mode 100644 notebooks/modelkwaliteit.py create mode 100644 notebooks/noorderzijlvest/00_get_model.py create mode 100644 notebooks/noorderzijlvest/00_review_model_network.py create mode 100644 notebooks/rijn_en_ijssel/00_get_model.py create mode 100644 notebooks/rijn_en_ijssel/00_review_model_network.py create mode 100644 notebooks/rijn_en_ijssel/01_fix_model_network.py create mode 100644 
notebooks/stichtse_rijnlanden/00_get_model.py create mode 100644 notebooks/stichtse_rijnlanden/00_review_model_network.py create mode 100644 notebooks/vallei_en_veluwe/00_get_model.py create mode 100644 notebooks/vallei_en_veluwe/00_review_model_network.py create mode 100644 notebooks/vallei_en_veluwe/01_fix_model_network.py create mode 100644 notebooks/vechtstromen/00_review_model_network.py diff --git a/notebooks/aa_en_maas/00_get_model.py b/notebooks/aa_en_maas/00_get_model.py new file mode 100644 index 0000000..955c5cf --- /dev/null +++ b/notebooks/aa_en_maas/00_get_model.py @@ -0,0 +1,17 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "AaenMaas" +short_name = "aam" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) diff --git a/notebooks/aa_en_maas/00_review_model_network.py b/notebooks/aa_en_maas/00_review_model_network.py new file mode 100644 index 0000000..b408d40 --- /dev/null +++ b/notebooks/aa_en_maas/00_review_model_network.py @@ -0,0 +1,58 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "AaenMaas" +short_name = "aam" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated 
edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +mask = model.edge.df.geometry.length == 0 +model.edge.df[mask].centroid.to_file(modelfouten_gpkg, layer="edge_zonder_lengte") +model.edge.df = model.edge.df[~mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/aaenmaas/get_model.py b/notebooks/aaenmaas/get_model.py deleted file mode 100644 index 29dd5a3..0000000 --- a/notebooks/aaenmaas/get_model.py +++ /dev/null @@ -1,9 +0,0 @@ -# %% -from ribasim_nl import CloudStorage - -cloud = CloudStorage() - -aaenmaas_url = cloud.joinurl("AaenMaas", "modellen", "AaenMaas_2024_6_3") - -# %% -cloud.download_content(aaenmaas_url) diff --git a/notebooks/brabantse_delta/00_get_model.py b/notebooks/brabantse_delta/00_get_model.py new file mode 100644 index 0000000..77b776f --- /dev/null +++ b/notebooks/brabantse_delta/00_get_model.py @@ -0,0 +1,19 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "BrabantseDelta" +short_name = "wbd" 
+ +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) + +# %% diff --git a/notebooks/brabantse_delta/00_review_model_network.py b/notebooks/brabantse_delta/00_review_model_network.py new file mode 100644 index 0000000..d20b24b --- /dev/null +++ b/notebooks/brabantse_delta/00_review_model_network.py @@ -0,0 +1,58 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "BrabantseDelta" +short_name = "wbd" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +mask = model.edge.df.geometry.length == 0 +model.edge.df[mask].centroid.to_file(modelfouten_gpkg, layer="edge_zonder_lengte") +model.edge.df = model.edge.df[~mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + 
modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/hunze_en_aas/00_get_model.py b/notebooks/hunze_en_aas/00_get_model.py new file mode 100644 index 0000000..0e99073 --- /dev/null +++ b/notebooks/hunze_en_aas/00_get_model.py @@ -0,0 +1,19 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "HunzeenAas" +short_name = "hea" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) + +# %% diff --git a/notebooks/hunze_en_aas/00_review_model_network.py b/notebooks/hunze_en_aas/00_review_model_network.py new file mode 100644 index 0000000..2640da1 --- /dev/null +++ b/notebooks/hunze_en_aas/00_review_model_network.py @@ -0,0 +1,58 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "HunzeenAas" +short_name = "hea" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = 
NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +mask = model.edge.df.geometry.length == 0 +model.edge.df[mask].centroid.to_file(modelfouten_gpkg, layer="edge_zonder_lengte") +model.edge.df = model.edge.df[~mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/limburg/00_get_model.py b/notebooks/limburg/00_get_model.py new file mode 100644 index 0000000..9b5b2fc --- /dev/null +++ b/notebooks/limburg/00_get_model.py @@ -0,0 +1,19 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "Limburg" +short_name = "limburg" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", 
"model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) + +# %% diff --git a/notebooks/limburg/00_review_model_network.py b/notebooks/limburg/00_review_model_network.py new file mode 100644 index 0000000..2fc14ff --- /dev/null +++ b/notebooks/limburg/00_review_model_network.py @@ -0,0 +1,58 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "Limburg" +short_name = "limburg" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +mask = model.edge.df.geometry.length == 0 +model.edge.df[mask].centroid.to_file(modelfouten_gpkg, layer="edge_zonder_lengte") +model.edge.df = model.edge.df[~mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins 
+network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/modelkwaliteit.py b/notebooks/modelkwaliteit.py new file mode 100644 index 0000000..a975a12 --- /dev/null +++ b/notebooks/modelkwaliteit.py @@ -0,0 +1,44 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model + +cloud = CloudStorage() + + +# %% +data = [] +for authority in cloud.water_authorities: + ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") + if ribasim_toml.exists(): + model = Model.read(ribasim_toml) + + # duplicated edges + duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) + model.edge.df.drop_duplicates(inplace=True) + + # non existing nodes + mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index + ) + nodes_not_existing = len(model.edge.df[~mask]) + model.edge.df = model.edge.df[mask] + + data += [ + { + "waterschap": authority, + "basin_nodes": len(model.basin.node.df), + "basin_areas": len(model.basin.area.df), + "basin_verschil": abs(len(model.basin.node.df) - len(model.basin.area.df)), + "basin_area_lt_5000m2": len(model.basin.area.df[model.basin.area.df.area < 5000]), + "verkeerde_in_uitstroom": len(model.invalid_topology_at_node()), + "dubbele_edges": duplicated_edges, + "niet-bestaande_knopen_bij_edge": nodes_not_existing, + } + ] + + +df = pd.DataFrame(data) + + +df.to_excel(cloud.joinpath("modelkwaliteit.xlsx"), index=False) diff --git a/notebooks/noorderzijlvest/00_get_model.py b/notebooks/noorderzijlvest/00_get_model.py new file mode 100644 index 0000000..102a2ac --- /dev/null +++ 
b/notebooks/noorderzijlvest/00_get_model.py @@ -0,0 +1,17 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "Noorderzijlvest" +short_name = "nzv" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) diff --git a/notebooks/noorderzijlvest/00_review_model_network.py b/notebooks/noorderzijlvest/00_review_model_network.py new file mode 100644 index 0000000..ab05e94 --- /dev/null +++ b/notebooks/noorderzijlvest/00_review_model_network.py @@ -0,0 +1,54 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "Noorderzijlvest" +short_name = "nzv" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + 
modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/rijn_en_ijssel/00_get_model.py b/notebooks/rijn_en_ijssel/00_get_model.py new file mode 100644 index 0000000..1b7fa0d --- /dev/null +++ b/notebooks/rijn_en_ijssel/00_get_model.py @@ -0,0 +1,17 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "RijnenIJssel" +short_name = "wrij" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) diff --git a/notebooks/rijn_en_ijssel/00_review_model_network.py b/notebooks/rijn_en_ijssel/00_review_model_network.py new file mode 100644 index 0000000..b15fbd3 --- /dev/null +++ b/notebooks/rijn_en_ijssel/00_review_model_network.py @@ -0,0 +1,58 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "RijnenIJssel" +short_name = "wrij" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) 
+network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +mask = model.edge.df.geometry.length == 0 +model.edge.df[mask].centroid.to_file(modelfouten_gpkg, layer="edge_zonder_lengte") +model.edge.df = model.edge.df[~mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/rijn_en_ijssel/01_fix_model_network.py b/notebooks/rijn_en_ijssel/01_fix_model_network.py new file mode 100644 index 0000000..77a0ba0 --- /dev/null +++ b/notebooks/rijn_en_ijssel/01_fix_model_network.py @@ -0,0 +1,211 @@ +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin, level_boundary, manning_resistance, outlet +from ribasim_nl import CloudStorage, Model, NetworkValidator 
+from shapely.geometry import MultiPolygon + +cloud = CloudStorage() + +authority = "ValleienVeluwe" +short_name = "venv" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") +split_line_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True +) + +# %% read model +model = Model.read(ribasim_toml) +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_fix_model_network", f"{short_name}.toml") +network_validator = NetworkValidator(model) + +# %% some stuff we'll need again +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +level_data = level_boundary.Static(level=[0]) + +basin_data = [ + basin.Profile(level=[0.0, 1.0], area=[0.01, 1000.0]), + basin.Static( + drainage=[0.0], + potential_evaporation=[0.001 / 86400], + infiltration=[0.0], + precipitation=[0.005 / 86400], + ), + basin.State(level=[0]), +] +outlet_data = outlet.Static(flow_rate=[100]) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401873626 +# Verwijderen duplicate edges + +model.edge.df.drop_duplicates(inplace=True) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401876430 + +# Toevoegen ontbrekende basins (oplossen topologie) +basin_edges_df = network_validator.edge_incorrect_connectivity() +basin_nodes_df = network_validator.node_invalid_connectivity() +basin_edges_df.to_file("basin_edges.gpkg") +basin_nodes_df.to_file("basin_nodes.gpkg") + +for row in basin_nodes_df.itertuples(): + # maak basin-node + basin_node = model.basin.add(Node(geometry=row.geometry), tables=basin_data) + + # update edge_table + model.edge.df.loc[basin_edges_df[basin_edges_df.from_node_id == row.node_id].index, ["from_node_id"]] = ( + basin_node.node_id + ) + model.edge.df.loc[basin_edges_df[basin_edges_df.to_node_id == 
row.node_id].index, ["to_node_id"]] = ( + basin_node.node_id + ) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401959032 + +# Oplossen verkeerde takrichting +for edge_id in [1353, 933, 373, 401, 4, 1338]: + model.reverse_edge(edge_id=edge_id) + +# model.invalid_topology_at_node().to_file("topo_errors.gpkg") + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402031275 + +# Veluwemeer at Harderwijk verwijderen +for node_id in [24, 694]: + model.remove_node(node_id, remove_edges=True) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402229646 + +# Veluwemeer at Elburg verwijderen +for node_id in [3, 1277]: + model.remove_node(node_id, remove_edges=True) + +# %% https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402257101 + +model.fix_unassigned_basin_area() + +# %% https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402281396 + +# Verwijderen basins zonder area of toevoegen/opknippen basin /area +model.split_basin(line=split_line_gdf.at[1, "geometry"]) +model.split_basin(line=split_line_gdf.at[2, "geometry"]) +model.split_basin(line=split_line_gdf.at[3, "geometry"]) +model.merge_basins(basin_id=1150, to_basin_id=1101) +model.merge_basins(basin_id=1196, to_basin_id=1192) +model.merge_basins(basin_id=1202, to_basin_id=1049) +model.merge_basins(basin_id=1207, to_basin_id=837) +model.merge_basins(basin_id=1208, to_basin_id=851, are_connected=False) +model.merge_basins(basin_id=1210, to_basin_id=1090) +model.merge_basins(basin_id=1212, to_basin_id=823) +model.merge_basins(basin_id=1216, to_basin_id=751, are_connected=False) +model.merge_basins(basin_id=1217, to_basin_id=752) +model.merge_basins(basin_id=1219, to_basin_id=814) +model.merge_basins(basin_id=1220, to_basin_id=1118) +model.merge_basins(basin_id=1221, to_basin_id=1170) +model.update_node(1229, "LevelBoundary", data=[level_data]) +model.merge_basins(basin_id=1254, to_basin_id=1091, 
are_connected=False) +model.merge_basins(basin_id=1260, to_basin_id=1125, are_connected=False) +model.merge_basins(basin_id=1263, to_basin_id=863) +model.merge_basins(basin_id=1265, to_basin_id=974) +model.remove_node(node_id=539, remove_edges=True) +model.merge_basins(basin_id=1267, to_basin_id=1177, are_connected=False) +model.remove_node(1268, remove_edges=True) +model.remove_node(360, remove_edges=True) +model.remove_node(394, remove_edges=True) +model.merge_basins(basin_id=1269, to_basin_id=1087) +model.merge_basins(basin_id=1149, to_basin_id=1270, are_connected=False) + + +model.fix_unassigned_basin_area() +model.basin.area.df = model.basin.area.df[~model.basin.area.df.index.isin(model.unassigned_basin_area.index)] + +# %% +# corrigeren knoop-topologie + +# ManningResistance bovenstrooms LevelBoundary naar Outlet +for row in network_validator.edge_incorrect_type_connectivity().itertuples(): + model.update_node(row.from_node_id, "Outlet", data=[outlet_data]) + +# Inlaten van ManningResistance naar Outlet +for row in network_validator.edge_incorrect_type_connectivity( + from_node_type="LevelBoundary", to_node_type="ManningResistance" +).itertuples(): + model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) + + +# buffer out small slivers +model.basin.area.df.loc[:, ["geometry"]] = ( + model.basin.area.df.buffer(0.1) + .buffer(-0.1) + .apply(lambda x: x if x.geom_type == "MultiPolygon" else MultiPolygon([x])) +) + +# basin-profielen updaten + +df = pd.DataFrame( + { + "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), + "level": [0.0, 1.0] * len(model.basin.node.df), + "area": [0.01, 1000.0] * len(model.basin.node.df), + } +) +df.index.name = "fid" +model.basin.profile.df = df + +df = model.basin.profile.df.groupby("node_id")[["level"]].max().reset_index() +df.index.name = "fid" +model.basin.state.df = df + +# %% +# tabulated_rating_curves updaten +df = pd.DataFrame( + { + "node_id": 
np.repeat(model.tabulated_rating_curve.node.df.index.to_numpy(), 2), + "level": [0.0, 5] * len(model.tabulated_rating_curve.node.df), + "flow_rate": [0, 0.1] * len(model.tabulated_rating_curve.node.df), + } +) +df.index.name = "fid" +model.tabulated_rating_curve.static.df = df + + +# %% + +# level_boundaries updaten +df = pd.DataFrame( + { + "node_id": model.level_boundary.node.df.index.to_list(), + "level": [0.0] * len(model.level_boundary.node.df), + } +) +df.index.name = "fid" +model.level_boundary.static.df = df + +# %% +# manning_resistance updaten +length = len(model.manning_resistance.node.df) +df = pd.DataFrame( + { + "node_id": model.manning_resistance.node.df.index.to_list(), + "length": [100.0] * length, + "manning_n": [100.0] * length, + "profile_width": [100.0] * length, + "profile_slope": [100.0] * length, + } +) +df.index.name = "fid" +model.manning_resistance.static.df = df + + +# %% write model +# model.use_validation = True +model.write(ribasim_toml) + +# %% diff --git a/notebooks/stichtse_rijnlanden/00_get_model.py b/notebooks/stichtse_rijnlanden/00_get_model.py new file mode 100644 index 0000000..3df774d --- /dev/null +++ b/notebooks/stichtse_rijnlanden/00_get_model.py @@ -0,0 +1,17 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "StichtseRijnlanden" +short_name = "hdsr" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) diff --git a/notebooks/stichtse_rijnlanden/00_review_model_network.py b/notebooks/stichtse_rijnlanden/00_review_model_network.py new file mode 100644 index 0000000..90bc46e --- /dev/null +++ 
b/notebooks/stichtse_rijnlanden/00_review_model_network.py @@ -0,0 +1,54 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "StichtseRijnlanden" +short_name = "hdsr" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/vallei_en_veluwe/00_get_model.py b/notebooks/vallei_en_veluwe/00_get_model.py new file mode 100644 index 0000000..69fa806 --- /dev/null +++ 
b/notebooks/vallei_en_veluwe/00_get_model.py @@ -0,0 +1,17 @@ +# %% +from ribasim_nl import CloudStorage + +cloud = CloudStorage() + +authority = "ValleienVeluwe" +short_name = "venv" + +cloud = CloudStorage() + +model_url = cloud.joinurl(authority, "modellen", f"{authority}_2024_6_3") +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", "model.toml") +if not ribasim_toml.exists(): + cloud.download_content(model_url) + +if ribasim_toml.exists(): # get a short_name version to differentiate QGIS layergroup + ribasim_toml.with_name(f"{short_name}.toml").write_text(ribasim_toml.read_text()) diff --git a/notebooks/vallei_en_veluwe/00_review_model_network.py b/notebooks/vallei_en_veluwe/00_review_model_network.py new file mode 100644 index 0000000..0b9f470 --- /dev/null +++ b/notebooks/vallei_en_veluwe/00_review_model_network.py @@ -0,0 +1,54 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "ValleienVeluwe" +short_name = "venv" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +# niet-gekoppelde areas 
+model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/notebooks/vallei_en_veluwe/01_fix_model_network.py b/notebooks/vallei_en_veluwe/01_fix_model_network.py new file mode 100644 index 0000000..77a0ba0 --- /dev/null +++ b/notebooks/vallei_en_veluwe/01_fix_model_network.py @@ -0,0 +1,211 @@ +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin, level_boundary, manning_resistance, outlet +from ribasim_nl import CloudStorage, Model, NetworkValidator +from shapely.geometry import MultiPolygon + +cloud = CloudStorage() + +authority = "ValleienVeluwe" +short_name = "venv" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") +split_line_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True +) + +# %% read model +model = Model.read(ribasim_toml) +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_fix_model_network", f"{short_name}.toml") +network_validator = NetworkValidator(model) + +# %% some stuff we'll need again +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +level_data = level_boundary.Static(level=[0]) + 
+basin_data = [ + basin.Profile(level=[0.0, 1.0], area=[0.01, 1000.0]), + basin.Static( + drainage=[0.0], + potential_evaporation=[0.001 / 86400], + infiltration=[0.0], + precipitation=[0.005 / 86400], + ), + basin.State(level=[0]), +] +outlet_data = outlet.Static(flow_rate=[100]) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401873626 +# Verwijderen duplicate edges + +model.edge.df.drop_duplicates(inplace=True) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401876430 + +# Toevoegen ontbrekende basins (oplossen topologie) +basin_edges_df = network_validator.edge_incorrect_connectivity() +basin_nodes_df = network_validator.node_invalid_connectivity() +basin_edges_df.to_file("basin_edges.gpkg") +basin_nodes_df.to_file("basin_nodes.gpkg") + +for row in basin_nodes_df.itertuples(): + # maak basin-node + basin_node = model.basin.add(Node(geometry=row.geometry), tables=basin_data) + + # update edge_table + model.edge.df.loc[basin_edges_df[basin_edges_df.from_node_id == row.node_id].index, ["from_node_id"]] = ( + basin_node.node_id + ) + model.edge.df.loc[basin_edges_df[basin_edges_df.to_node_id == row.node_id].index, ["to_node_id"]] = ( + basin_node.node_id + ) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401959032 + +# Oplossen verkeerde takrichting +for edge_id in [1353, 933, 373, 401, 4, 1338]: + model.reverse_edge(edge_id=edge_id) + +# model.invalid_topology_at_node().to_file("topo_errors.gpkg") + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402031275 + +# Veluwemeer at Harderwijk verwijderen +for node_id in [24, 694]: + model.remove_node(node_id, remove_edges=True) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402229646 + +# Veluwemeer at Elburg verwijderen +for node_id in [3, 1277]: + model.remove_node(node_id, remove_edges=True) + +# %% 
https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402257101 + +model.fix_unassigned_basin_area() + +# %% https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402281396 + +# Verwijderen basins zonder area of toevoegen/opknippen basin /area +model.split_basin(line=split_line_gdf.at[1, "geometry"]) +model.split_basin(line=split_line_gdf.at[2, "geometry"]) +model.split_basin(line=split_line_gdf.at[3, "geometry"]) +model.merge_basins(basin_id=1150, to_basin_id=1101) +model.merge_basins(basin_id=1196, to_basin_id=1192) +model.merge_basins(basin_id=1202, to_basin_id=1049) +model.merge_basins(basin_id=1207, to_basin_id=837) +model.merge_basins(basin_id=1208, to_basin_id=851, are_connected=False) +model.merge_basins(basin_id=1210, to_basin_id=1090) +model.merge_basins(basin_id=1212, to_basin_id=823) +model.merge_basins(basin_id=1216, to_basin_id=751, are_connected=False) +model.merge_basins(basin_id=1217, to_basin_id=752) +model.merge_basins(basin_id=1219, to_basin_id=814) +model.merge_basins(basin_id=1220, to_basin_id=1118) +model.merge_basins(basin_id=1221, to_basin_id=1170) +model.update_node(1229, "LevelBoundary", data=[level_data]) +model.merge_basins(basin_id=1254, to_basin_id=1091, are_connected=False) +model.merge_basins(basin_id=1260, to_basin_id=1125, are_connected=False) +model.merge_basins(basin_id=1263, to_basin_id=863) +model.merge_basins(basin_id=1265, to_basin_id=974) +model.remove_node(node_id=539, remove_edges=True) +model.merge_basins(basin_id=1267, to_basin_id=1177, are_connected=False) +model.remove_node(1268, remove_edges=True) +model.remove_node(360, remove_edges=True) +model.remove_node(394, remove_edges=True) +model.merge_basins(basin_id=1269, to_basin_id=1087) +model.merge_basins(basin_id=1149, to_basin_id=1270, are_connected=False) + + +model.fix_unassigned_basin_area() +model.basin.area.df = model.basin.area.df[~model.basin.area.df.index.isin(model.unassigned_basin_area.index)] + +# %% +# corrigeren knoop-topologie + +# 
ManningResistance bovenstrooms LevelBoundary naar Outlet +for row in network_validator.edge_incorrect_type_connectivity().itertuples(): + model.update_node(row.from_node_id, "Outlet", data=[outlet_data]) + +# Inlaten van ManningResistance naar Outlet +for row in network_validator.edge_incorrect_type_connectivity( + from_node_type="LevelBoundary", to_node_type="ManningResistance" +).itertuples(): + model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) + + +# buffer out small slivers +model.basin.area.df.loc[:, ["geometry"]] = ( + model.basin.area.df.buffer(0.1) + .buffer(-0.1) + .apply(lambda x: x if x.geom_type == "MultiPolygon" else MultiPolygon([x])) +) + +# basin-profielen updaten + +df = pd.DataFrame( + { + "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), + "level": [0.0, 1.0] * len(model.basin.node.df), + "area": [0.01, 1000.0] * len(model.basin.node.df), + } +) +df.index.name = "fid" +model.basin.profile.df = df + +df = model.basin.profile.df.groupby("node_id")[["level"]].max().reset_index() +df.index.name = "fid" +model.basin.state.df = df + +# %% +# tabulated_rating_curves updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.tabulated_rating_curve.node.df.index.to_numpy(), 2), + "level": [0.0, 5] * len(model.tabulated_rating_curve.node.df), + "flow_rate": [0, 0.1] * len(model.tabulated_rating_curve.node.df), + } +) +df.index.name = "fid" +model.tabulated_rating_curve.static.df = df + + +# %% + +# level_boundaries updaten +df = pd.DataFrame( + { + "node_id": model.level_boundary.node.df.index.to_list(), + "level": [0.0] * len(model.level_boundary.node.df), + } +) +df.index.name = "fid" +model.level_boundary.static.df = df + +# %% +# manning_resistance updaten +length = len(model.manning_resistance.node.df) +df = pd.DataFrame( + { + "node_id": model.manning_resistance.node.df.index.to_list(), + "length": [100.0] * length, + "manning_n": [100.0] * length, + "profile_width": [100.0] * length, + "profile_slope": [100.0] * length, 
+ } +) +df.index.name = "fid" +model.manning_resistance.static.df = df + + +# %% write model +# model.use_validation = True +model.write(ribasim_toml) + +# %% diff --git a/notebooks/vechtstromen/00_review_model_network.py b/notebooks/vechtstromen/00_review_model_network.py new file mode 100644 index 0000000..bab28cc --- /dev/null +++ b/notebooks/vechtstromen/00_review_model_network.py @@ -0,0 +1,58 @@ +# %% + +import pandas as pd +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "Vechtstromen" +short_name = "vechtstromen" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +model = Model.read(ribasim_toml) +network_validator = NetworkValidator(model) + +verwerkt_dir = cloud.joinpath(authority, "verwerkt") +verwerkt_dir.mkdir(exist_ok=True) + +modelfouten_gpkg = cloud.joinpath(authority, "verwerkt", "modelfouten.gpkg") + +# %% verwijderen duplicated edges + +duplicated_edges = len(model.edge.df[model.edge.df.duplicated()]) +model.edge.df.drop_duplicates(inplace=True) + +# %% wegschrijven fouten + +# niet-bestaande fouten +mask = model.edge.df.to_node_id.isin(model.node_table().df.index) & model.edge.df.from_node_id.isin( + model.node_table().df.index +) + +edge_mist_node_df = model.edge.df[~mask] +model.edge.df = model.edge.df[mask] + +mask = model.edge.df.geometry.length == 0 +model.edge.df[mask].centroid.to_file(modelfouten_gpkg, layer="edge_zonder_lengte") +model.edge.df = model.edge.df[~mask] + +# niet-gekoppelde areas +model.basin.area.df[~model.basin.area.df.node_id.isin(model.basin.node.df.index)].to_file( + modelfouten_gpkg, layer="area_niet_een_basin" +) + +model.basin.node.df[~model.basin.node.df.index.isin(model.basin.area.df.node_id)].to_file( + modelfouten_gpkg, layer="basin_zonder_area" +) + +# ontbrekende basins +network_validator.node_invalid_connectivity().to_file(modelfouten_gpkg, 
layer="node_mist") +pd.concat([network_validator.edge_incorrect_connectivity(), edge_mist_node_df]).to_file( + modelfouten_gpkg, layer="ege_mist_node" +) + +# nodes met verkeerde richting + +model.invalid_topology_at_node().to_file(modelfouten_gpkg, layer="node_met_verkeerde_instroom_uitstroom_egde") diff --git a/src/ribasim_nl/ribasim_nl/geometry.py b/src/ribasim_nl/ribasim_nl/geometry.py index 1457da7..439e374 100644 --- a/src/ribasim_nl/ribasim_nl/geometry.py +++ b/src/ribasim_nl/ribasim_nl/geometry.py @@ -104,8 +104,7 @@ def split_basin(basin_polygon: Polygon, line: LineString) -> MultiPolygon: return MultiPolygon(sort_basins(keep_polys)) -def split_basin_multi_polygon(basin_polygon: MultiPolygon, line: LineString) -> tuple[MultiPolygon, MultiPolygon]: - """Split a MultiPolygon into two given a LineString.""" +def split_basin_multi_polygon(basin_polygon: MultiPolygon, line: LineString): line_centre = line.interpolate(0.5, normalized=True) # get the polygon to cut diff --git a/src/ribasim_nl/ribasim_nl/model.py b/src/ribasim_nl/ribasim_nl/model.py index 0a9a8f6..747ada3 100644 --- a/src/ribasim_nl/ribasim_nl/model.py +++ b/src/ribasim_nl/ribasim_nl/model.py @@ -1,11 +1,13 @@ from pathlib import Path from typing import Literal +import geopandas as gpd import networkx as nx import pandas as pd from pydantic import BaseModel from ribasim import Model, Node from ribasim.geometry.edge import NodeData +from ribasim.validation import flow_edge_neighbor_amount as edge_amount from shapely.geometry import LineString, MultiPolygon, Point, Polygon from shapely.geometry.base import BaseGeometry @@ -145,7 +147,7 @@ def remove_node(self, node_id: int, remove_edges: bool = False): for attr in table.model_fields.keys(): df = getattr(table, attr).df if df is not None: - if "node_id" in df.columns: + if node_id in df.columns: getattr(table, attr).df = df[df.node_id != node_id] else: getattr(table, attr).df = df[df.index != node_id] @@ -159,24 +161,11 @@ def remove_node(self, 
node_id: int, remove_edges: bool = False): self.remove_edge( from_node_id=row.from_node_id, to_node_id=row.to_node_id, remove_disconnected_nodes=False ) + # remove from used node-ids so we can add it again in the same table if node_id in table._parent._used_node_ids: table._parent._used_node_ids.node_ids.remove(node_id) - def update_meta_properties(self, node_properties: dict, node_types: list | None = None): - """Set properties for all, or a selection of, node-types.""" - if node_types is None: - node_types = self.node_table().df.node_type.unique() - - for node_type in node_types: - table = getattr(self, pascal_to_snake_case(node_type)) - node_df = getattr(table, "node").df - if node_df is not None: - for key, value in node_properties.items(): - if not key.startswith("meta_"): - key = f"meta_{key}" - node_df.loc[:, [key]] = value - def update_node(self, node_id, node_type, data, node_properties: dict = {}): existing_node_type = self.node_table().df.at[node_id, "node_type"] @@ -404,11 +393,13 @@ def edge_to_node_type(self): node_df = self.node_table().df return self.edge.df.to_node_id.apply(lambda x: node_df.at[x, "node_type"] if x in node_df.index else None) - def split_basin(self, line: LineString): + def split_basin(self, line: LineString, basin_id: int | None = None): if self.basin.area.df is None: raise ValueError("provide basin / area table first") line_centre = line.interpolate(0.5, normalized=True) + if basin_id is not None: + basin_area_df = self.basin.area.df.loc[self.basin.area.df.node_id == basin_id] basin_area_df = self.basin.area.df[self.basin.area.df.contains(line_centre)] if len(basin_area_df) != 1: @@ -516,3 +507,84 @@ def merge_basins(self, basin_id: int, to_basin_id: int, are_connected=True): if self.basin.area.df.crs is None: self.basin.area.df.crs = self.crs + + def invalid_topology_at_node(self, edge_type: str = "flow") -> gpd.GeoDataFrame: + df_graph = self.edge.df + df_node = self.node_table().df + # Join df_edge with df_node to get 
to_node_type + df_graph = df_graph.join(df_node[["node_type"]], on="from_node_id", how="left", rsuffix="_from") + df_graph = df_graph.rename(columns={"node_type": "from_node_type"}) + + df_graph = df_graph.join(df_node[["node_type"]], on="to_node_id", how="left", rsuffix="_to") + df_graph = df_graph.rename(columns={"node_type": "to_node_type"}) + df_node = self.node_table().df + + """Check if the neighbor amount of the two nodes connected by the given edge meet the minimum requirements.""" + errors = [] + + # filter graph by edge type + df_graph = df_graph.loc[df_graph["edge_type"] == edge_type] + + # count occurrence of "from_node" which reflects the number of outneighbors + from_node_count = ( + df_graph.groupby("from_node_id").size().reset_index(name="from_node_count") # type: ignore + ) + + # append from_node_count column to from_node_id and from_node_type + from_node_info = ( + df_graph[["from_node_id", "from_node_type"]] + .drop_duplicates() + .merge(from_node_count, on="from_node_id", how="left") + ) + from_node_info = from_node_info[["from_node_id", "from_node_count", "from_node_type"]] + + # add the node that is not the upstream of any other nodes + from_node_info = self._add_source_sink_node(df_node["node_type"], from_node_info, "from") + + # loop over all the "from_node" and check if they have enough outneighbor + for _, row in from_node_info.iterrows(): + # from node's outneighbor + if row["from_node_count"] < edge_amount[row["from_node_type"]][2]: + node_id = row["from_node_id"] + errors += [ + { + "geometry": df_node.at[node_id, "geometry"], + "node_id": node_id, + "node_type": df_node.at[node_id, "node_type"], + "exception": f"must have at least {edge_amount[row['from_node_type']][2]} outneighbor(s) (got {row['from_node_count']})", + } + ] + + # count occurrence of "to_node" which reflects the number of inneighbors + to_node_count = ( + df_graph.groupby("to_node_id").size().reset_index(name="to_node_count") # type: ignore + ) + + # append 
to_node_count column to result + to_node_info = ( + df_graph[["to_node_id", "to_node_type"]].drop_duplicates().merge(to_node_count, on="to_node_id", how="left") + ) + to_node_info = to_node_info[["to_node_id", "to_node_count", "to_node_type"]] + + # add the node that is not the downstream of any other nodes + to_node_info = self._add_source_sink_node(df_node["node_type"], to_node_info, "to") + + # loop over all the "to_node" and check if they have enough inneighbor + for _, row in to_node_info.iterrows(): + if row["to_node_count"] < edge_amount[row["to_node_type"]][0]: + node_id = row["to_node_id"] + errors += [ + { + "geometry": df_node.at[node_id, "geometry"], + "node_id": node_id, + "node_type": df_node.at[node_id, "node_type"], + "exception": f"must have at least {edge_amount[row['to_node_type']][0]} inneighbor(s) (got {row['to_node_count']})", + } + ] + + if len(errors) > 0: + return gpd.GeoDataFrame(errors, crs=self.crs).set_index("node_id") + else: + return gpd.GeoDataFrame( + [], columns=["node_id", "node_type", "exception"], geometry=gpd.GeoSeries(crs=self.crs) + ).set_index("node_id") diff --git a/src/ribasim_nl/ribasim_nl/network_validator.py b/src/ribasim_nl/ribasim_nl/network_validator.py index 13cb07d..057fee3 100644 --- a/src/ribasim_nl/ribasim_nl/network_validator.py +++ b/src/ribasim_nl/ribasim_nl/network_validator.py @@ -69,7 +69,7 @@ def node_internal_basin(self): mask = self.node_df.apply(lambda row: check_internal_basin(row, self.edge_df), axis=1) return self.node_df[mask] - def node_invalid_connectivity(self, tolerance: float = 1.0) -> gpd.GeoDataFrame: + def node_invalid_connectivity(self, tolerance: float = 1.0): """Check if node_from and node_to are correct on edge""" node_df = self.node_df invalid_edges_df = self.edge_incorrect_connectivity() From 4a8ae3c7b14822c96401afaff5fe7a3d525ecc6e Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Mon, 21 Oct 2024 13:25:34 +0200 Subject: [PATCH 13/23] Peilbeheerst oktober (#169) By @rbruijnshkv. 
--------- Co-authored-by: rbruijnshkv --- .../01_test_parse_crossings.ipynb | 6 +- .../02_crossings_to_ribasim_notebook.ipynb | 41 +- .../AmstelGooienVecht_parametrize.ipynb | 327 ++++----- .../sturing_AmstelGooienVecht.json | 193 +++--- .../Parametrize/sturing_Scheldestromen.json | 145 ++++ .../peilbeheerst_model/add_storage_basins.py | 442 +++++------- .../crossings_to_ribasim.py | 141 ++-- .../ribasim_parametrization.py | 633 +++++++++++++++--- 8 files changed, 1213 insertions(+), 715 deletions(-) create mode 100644 src/peilbeheerst_model/Parametrize/sturing_Scheldestromen.json diff --git a/src/peilbeheerst_model/01_test_parse_crossings.ipynb b/src/peilbeheerst_model/01_test_parse_crossings.ipynb index 4df0fd6..55b8c3d 100644 --- a/src/peilbeheerst_model/01_test_parse_crossings.ipynb +++ b/src/peilbeheerst_model/01_test_parse_crossings.ipynb @@ -577,9 +577,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python [conda env:ribasim]", + "display_name": "default", "language": "python", - "name": "conda-env-ribasim-py" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -591,7 +591,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb b/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb index 92082cb..f1d6006 100644 --- a/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb +++ b/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb @@ -108,30 +108,30 @@ "# insert the individual model modules in an actual model\n", "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", "\n", - "model.edge = edge\n", + "model.edge.df = edge\n", "\n", - "model.basin.node = basin_node\n", + "model.basin.node.df = basin_node\n", "model.basin.profile = basin_profile\n", "model.basin.static = 
basin_static\n", "model.basin.state = basin_state\n", "model.basin.area = basin_area\n", "\n", - "model.pump.node = pump_node\n", + "model.pump.node.df = pump_node\n", "model.pump.static = pump_static\n", "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", + "model.tabulated_rating_curve.node.df = tabulated_rating_curve_node\n", "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", "\n", - "model.manning_resistance.node = manning_resistance_node\n", + "model.manning_resistance.node.df = manning_resistance_node\n", "model.manning_resistance.static = manning_resistance_static\n", "\n", - "model.level_boundary.node = level_boundary_node\n", + "model.level_boundary.node.df = level_boundary_node\n", "model.level_boundary.static = level_boundary_static\n", "\n", - "model.flow_boundary.node = flow_boundary_node\n", + "model.flow_boundary.node.df = flow_boundary_node\n", "model.flow_boundary.static = flow_boundary_static\n", "\n", - "model.terminal.node = terminal_node\n", + "model.terminal.node.df = terminal_node\n", "\n", "# add checks and metadata\n", "checks = network.check(model, post_processed_data=post_processed_data, crossings=crossings)\n", @@ -789,37 +789,36 @@ "# insert the individual model modules in an actual model\n", "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", "\n", - "model.edge = edge\n", + "model.edge.df = edge\n", "\n", - "model.basin.node = basin_node\n", + "model.basin.node.df = basin_node\n", "model.basin.profile = basin_profile\n", "model.basin.static = basin_static\n", "model.basin.state = basin_state\n", "model.basin.area = basin_area\n", "\n", - "model.pump.node = pump_node\n", + "model.pump.node.df = pump_node\n", "model.pump.static = pump_static\n", "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", + "model.tabulated_rating_curve.node.df = tabulated_rating_curve_node\n", 
"model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", "\n", - "model.level_boundary.node = level_boundary_node\n", + "model.manning_resistance.node.df = manning_resistance_node\n", + "model.manning_resistance.static = manning_resistance_static\n", + "\n", + "model.level_boundary.node.df = level_boundary_node\n", "model.level_boundary.static = level_boundary_static\n", "\n", - "model.flow_boundary.node = flow_boundary_node\n", + "model.flow_boundary.node.df = flow_boundary_node\n", "model.flow_boundary.static = flow_boundary_static\n", "\n", - "model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", + "model.terminal.node.df = terminal_node\n", "\n", "# add checks and metadata\n", - "checks = network.check(post_processed_data=post_processed_data, crossings=crossings, model=model)\n", + "checks = network.check(model, post_processed_data=post_processed_data, crossings=crossings)\n", "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", "\n", "# write the result\n", - "# stop\n", "network.WriteResults(model=model, checks=checks)" ] }, @@ -1187,7 +1186,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb b/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb index 4a3f38b..b396128 100644 --- a/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb +++ b/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb @@ -7,7 +7,6 @@ "outputs": [], "source": [ "import datetime\n", - "import logging\n", "import os\n", "import pathlib\n", "import warnings\n", @@ -17,7 +16,7 @@ "import peilbeheerst_model.ribasim_parametrization as ribasim_param\n", "import ribasim\n", "import 
ribasim.nodes\n", - "from peilbeheerst_model.add_storage_basins import AddStorageBasin\n", + "from peilbeheerst_model.add_storage_basins import AddStorageBasins\n", "from peilbeheerst_model.controle_output import *\n", "from peilbeheerst_model.ribasim_feedback_processor import RibasimFeedbackProcessor\n", "\n", @@ -26,62 +25,6 @@ "warnings.filterwarnings(\"ignore\")" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# import datetime\n", - "# import logging\n", - "# import os\n", - "# import pathlib\n", - "# import sys\n", - "# import warnings\n", - "# from pathlib import Path\n", - "\n", - "# import pandas as pd\n", - "# import ribasim\n", - "# import ribasim.nodes\n", - "\n", - "# #load ribasim_nl\n", - "# module_path = Path.cwd() / '../../ribasim_nl/'\n", - "# sys.path.append(str(module_path))\n", - "\n", - "# current_dir = os.getcwd()\n", - "# parent_dir = os.path.dirname(current_dir)\n", - "# if parent_dir not in sys.path:\n", - "# sys.path.append(parent_dir)\n", - "\n", - "# %reload_ext autoreload\n", - "# %autoreload 2\n", - "# warnings.filterwarnings('ignore')\n", - "\n", - "# import peilbeheerst_model.ribasim_parametrization as ribasim_param\n", - "# from peilbeheerst_model.add_storage_basins import AddStorageBasin\n", - "# from peilbeheerst_model.controle_output import *\n", - "# from peilbeheerst_model.ribasim_feedback_processor import RibasimFeedbackProcessor\n", - "\n" - ] - }, - { - "cell_type": "raw", - "metadata": {}, - "source": [ - "\"\"\"\n", - "Model Aannamens:\n", - "- Max area van hydroobjecten is 10% van basin area, 90% bij boezem\n", - "- Initial state op streefpeil\n", - "\"\"\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "markdown", "metadata": {}, @@ -102,8 +45,6 @@ "metadata": {}, "outputs": [], "source": [ - "# Set paths\n", - "# work_dir = 
pathlib.Path(\"../../../../../Ribasim_networks/Waterschappen/AmstelGooienVecht/modellen/AmstelGooienVecht_boezemmodel\")\n", "waterschap = \"AmstelGooienVecht\"\n", "work_dir = pathlib.Path(f\"../../../../../Ribasim_updated_models/{waterschap}/modellen/{waterschap}_parametrized\")\n", "ribasim_gpkg = work_dir.joinpath(\"database.gpkg\")\n", @@ -123,7 +64,7 @@ "timesteps = 2\n", "delta_crest_level = 0.1 # delta waterlevel of boezem compared to streefpeil till no water can flow through an outlet\n", "\n", - "default_level = 0 # default LevelBoundary level" + "default_level = -0.42 # default LevelBoundary level" ] }, { @@ -140,7 +81,7 @@ "outputs": [], "source": [ "name = \"Ron Bruijns (HKV)\"\n", - "versie = \"2024_8_8\"\n", + "versie = \"2024_10_5\"\n", "\n", "feedback_excel = pathlib.Path(f\"../../../../../Ribasim_feedback/V1_formulieren/feedback_formulier_{waterschap}.xlsx\")\n", "feedback_excel_processed = (\n", @@ -179,26 +120,21 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Model specific tweaks" + "# Parameterization" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "new_node_id = max(ribasim_model.edge.df.from_node_id.max(), ribasim_model.edge.df.to_node_id.max()) + 1" + "## Nodes" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df[\"node_id\"] == 195, \"meta_streefpeil\"] = -2.45\n", - "ribasim_model.basin.state.df.loc[ribasim_model.basin.state.df.node_id == 195, \"level\"] = -2.45" + "### Basin (characteristics)" ] }, { @@ -207,28 +143,32 @@ "metadata": {}, "outputs": [], "source": [ - "assert not pd.isnull(ribasim_model.basin.area.df.meta_streefpeil).any()" + "ribasim_param.validate_basin_area(ribasim_model)" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": 
[], "source": [ - "# Parameterization" + "# remove the basins of above in the feedback form" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Nodes" + "## Model specific tweaks" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ - "### Basin (characteristics)" + "new_node_id = max(ribasim_model.edge.df.from_node_id.max(), ribasim_model.edge.df.to_node_id.max()) + 1" ] }, { @@ -237,9 +177,13 @@ "metadata": {}, "outputs": [], "source": [ - "# Define the initial state of each basin. Is set equal to the streefpeil\n", - "# ribasim_param.set_initial_basin_state(ribasim_model) #the initial states of the basins are by default already set to the streefpeil!\n", - "\n" + "# change unknown streefpeilen to a default streefpeil\n", + "ribasim_model.basin.area.df.loc[\n", + " ribasim_model.basin.area.df[\"meta_streefpeil\"] == \"Onbekend streefpeil\", \"meta_streefpeil\"\n", + "] = str(unknown_streefpeil)\n", + "ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df[\"meta_streefpeil\"] == -9.999, \"meta_streefpeil\"] = str(\n", + " unknown_streefpeil\n", + ")" ] }, { @@ -248,19 +192,9 @@ "metadata": {}, "outputs": [], "source": [ - "# Add storage basins\n", - "model_name = \"AmstelGooienVecht_StorageBasins\"\n", - "# node_ids = [1, 2, 3] # Specify node IDs to process or include_hoofdwater Boolean\n", - "# processor = AddStorageBasin(ribasim_toml, model_name, output_folder, include_hoofdwater=True, log=True, node_ids=None)\n", - "# ribasim_model = processor.run()\n", - "\n", - "processor = AddStorageBasin(\n", - " path_ribasim_toml, model_name, output_folder, include_hoofdwater=False, log=False, node_ids=None\n", - ")\n", - "ribasim_model = processor.run()\n", - "\n", - "# Check basin area\n", - "ribasim_param.validate_basin_area(ribasim_model)" + "ribasim_model.basin.area.df[\"meta_streefpeil\"] = ribasim_model.basin.area.df[\"meta_streefpeil\"].astype(float)\n", + 
"ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df.index == 195, \"meta_streefpeil\"] = -2.45\n", + "ribasim_model.basin.state.df.loc[ribasim_model.basin.state.df.index == 195, \"level\"] = -2.45" ] }, { @@ -269,22 +203,14 @@ "metadata": {}, "outputs": [], "source": [ - "# remove the basins of above in the feedback form" + "assert not pd.isnull(ribasim_model.basin.area.df.meta_streefpeil).any()" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# change unknown streefpeilen to a default streefpeil\n", - "ribasim_model.basin.area.df.loc[\n", - " ribasim_model.basin.area.df[\"meta_streefpeil\"] == \"Onbekend streefpeil\", \"meta_streefpeil\"\n", - "] = unknown_streefpeil\n", - "ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df[\"meta_streefpeil\"] == -9.999, \"meta_streefpeil\"] = (\n", - " unknown_streefpeil\n", - ")" + "## Implement standard profile and a storage basin" ] }, { @@ -303,6 +229,30 @@ ")" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# remove after the feedback forms have been fixed\n", + "ribasim_model.basin.profile.df.loc[ribasim_model.basin.profile.df.index == 559, \"area\"] = 1\n", + "ribasim_model.basin.profile.df.loc[ribasim_model.basin.profile.df.index == 560, \"area\"] = 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "add_storage_basins = AddStorageBasins(\n", + " ribasim_model=ribasim_model, exclude_hoofdwater=True, additional_basins_to_exclude=[]\n", + ")\n", + "\n", + "add_storage_basins.create_bergende_basins()" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -358,8 +308,7 @@ "metadata": {}, "outputs": [], "source": [ - "ribasim_param.Terminals_to_LevelBoundaries(ribasim_model=ribasim_model, default_level=default_level)\n", - "\n", + 
"ribasim_param.Terminals_to_LevelBoundaries(ribasim_model=ribasim_model, default_level=default_level) # clean\n", "ribasim_param.FlowBoundaries_to_LevelBoundaries(ribasim_model=ribasim_model, default_level=default_level)" ] }, @@ -401,7 +350,17 @@ "metadata": {}, "outputs": [], "source": [ - "ribasim_param.add_discrete_control(ribasim_model, waterschap, default_level)" + "ribasim_param.find_upstream_downstream_target_levels(ribasim_model, node=\"outlet\")\n", + "ribasim_param.find_upstream_downstream_target_levels(ribasim_model, node=\"pump\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# ribasim_param.add_discrete_control(ribasim_model, waterschap, default_level)" ] }, { @@ -410,9 +369,7 @@ "metadata": {}, "outputs": [], "source": [ - "ribasim_model.discrete_control.node.df = ribasim_model.discrete_control.node.df.drop_duplicates(\n", - " subset=\"node_id\"\n", - ").reset_index(drop=True)" + "ribasim_param.determine_min_upstream_max_downstream_levels(ribasim_model, waterschap)" ] }, { @@ -436,7 +393,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Set numerical settings" + "## Last formating of the tables" ] }, { @@ -445,12 +402,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Write model output\n", - "ribasim_param.index_reset(ribasim_model)\n", - "ribasim_model.starttime = datetime.datetime(2024, 1, 1)\n", - "ribasim_model.endtime = datetime.datetime(2024, 1, 30)\n", - "ribasim_model.solver.saveat = 3600\n", - "ribasim_param.write_ribasim_model_Zdrive(ribasim_model, path_ribasim_toml)" + "# only retain node_id's which are present in the .node table\n", + "ribasim_param.clean_tables(ribasim_model)" ] }, { @@ -458,20 +411,15 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, "source": [ - "## Run Model" + "ribasim_model.edge.df[\"fid\"] = ribasim_model.edge.df.index.copy()" ] }, { "cell_type": 
"markdown", "metadata": {}, "source": [ - "## Iterate over tabulated rating curves" + "# Set numerical settings" ] }, { @@ -480,34 +428,7 @@ "metadata": {}, "outputs": [], "source": [ - "try:\n", - " ribasim_param.iterate_TRC(\n", - " ribasim_param=ribasim_param,\n", - " allowed_tolerance=0.02,\n", - " max_iter=1,\n", - " expected_difference=0.1,\n", - " max_adjustment=0.25,\n", - " cmd=[\"ribasim\", path_ribasim_toml],\n", - " output_dir=output_dir,\n", - " path_ribasim_toml=path_ribasim_toml,\n", - " )\n", - "\n", - "except Exception:\n", - " logging.error(\"The model was not able to run. Log file:\")\n", - " log_file_path = os.path.join(output_dir, \"ribasim.log\") # Update with the correct path to your log file\n", - " try:\n", - " with open(log_file_path) as log_file:\n", - " log_content = log_file.read()\n", - " print(log_content)\n", - " except Exception as log_exception:\n", - " logging.error(f\"Could not read the log file: {log_exception}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Write model" + "ribasim_model.use_validation = True" ] }, { @@ -516,30 +437,26 @@ "metadata": {}, "outputs": [], "source": [ - "# control_dict = Control(work_dir = work_dir).run_all()\n", - "ribasim_param.write_ribasim_model_GoodCloud(\n", - " ribasim_model=ribasim_model,\n", - " path_ribasim_toml=path_ribasim_toml,\n", - " waterschap=\"AmstelGooienVecht\",\n", - " modeltype=\"boezemmodel\",\n", - " include_results=True,\n", - ")" + "# Write model output\n", + "# ribasim_param.index_reset(ribasim_model)\n", + "ribasim_model.starttime = datetime.datetime(2024, 1, 1)\n", + "ribasim_model.endtime = datetime.datetime(2025, 1, 1)\n", + "ribasim_model.solver.saveat = 3600\n", + "ribasim_param.write_ribasim_model_Zdrive(ribasim_model, path_ribasim_toml)" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "stop" + "## Run Model" ] }, { "cell_type": "markdown", "metadata": 
{}, "source": [ - "## Open Output" + "## Iterate over tabulated rating curves" ] }, { @@ -548,17 +465,34 @@ "metadata": {}, "outputs": [], "source": [ - "df_basin = pd.read_feather(os.path.join(output_dir, \"basin.arrow\"))\n", - "df_basin" + "# try:\n", + "# ribasim_param.iterate_TRC(\n", + "# ribasim_param=ribasim_param,\n", + "# allowed_tolerance=0.02,\n", + "# max_iter=1,\n", + "# expected_difference=0.1,\n", + "# max_adjustment=0.25,\n", + "# cmd=[\"ribasim\", path_ribasim_toml],\n", + "# output_dir=output_dir,\n", + "# path_ribasim_toml=path_ribasim_toml,\n", + "# )\n", + "\n", + "# except Exception:\n", + "# logging.error(\"The model was not able to run. Log file:\")\n", + "# log_file_path = os.path.join(output_dir, \"ribasim.log\") # Update with the correct path to your log file\n", + "# try:\n", + "# with open(log_file_path) as log_file:\n", + "# log_content = log_file.read()\n", + "# print(log_content)\n", + "# except Exception as log_exception:\n", + "# logging.error(f\"Could not read the log file: {log_exception}\")" ] }, { "cell_type": "markdown", - "metadata": { - "jp-MarkdownHeadingCollapsed": true - }, + "metadata": {}, "source": [ - "### FlowBoundary" + "# Write model" ] }, { @@ -567,29 +501,21 @@ "metadata": {}, "outputs": [], "source": [ - "# Set FlowBoundary rate for each pump\n", - "# ribasim_model.flow_boundary.static.df['flow_rate'] = 0 #\n", - "\n" + "# control_dict = Control(work_dir = work_dir).run_all()\n", + "ribasim_param.write_ribasim_model_GoodCloud(\n", + " ribasim_model=ribasim_model,\n", + " path_ribasim_toml=path_ribasim_toml,\n", + " waterschap=waterschap,\n", + " modeltype=\"boezemmodel\",\n", + " include_results=True,\n", + ")" ] }, { "cell_type": "markdown", - "metadata": { - "jp-MarkdownHeadingCollapsed": true - }, - "source": [ - "### Add Discrete Control" - ] - }, - { - "cell_type": "code", - "execution_count": null, "metadata": {}, - "outputs": [], "source": [ - "# Add discrete control nodes and control edges\n", - "# 
ribasim_param.add_discrete_control_nodes(ribasim_model)\n", - "\n" + "## Open Output" ] }, { @@ -597,24 +523,16 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, "source": [ - "### TabulatedRatingCurve" + "df_basin = pd.read_feather(os.path.join(output_dir, \"basin.arrow\"))\n", + "df_basin" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# # First, set the Q(h)-relationship to a relationship which is at each TRC node the same\n", - "# ribasim_param.set_tabulated_rating_curves(ribasim_model, level_increase=1.0, flow_rate=1)\n", - "\n" + "### Add discrete control nodes" ] }, { @@ -623,9 +541,8 @@ "metadata": {}, "outputs": [], "source": [ - "# # The water level in the boezem is regulated heavily. There is a smaller tolerance for an increase in water level.\n", - "# # Hence, the TRC's which discharge water from the boezem to a Terminal should have a higher capacity.\n", - "# ribasim_param.set_tabulated_rating_curves_boundaries(ribasim_model, level_increase=0.10, flow_rate=40)\n", + "# Add discrete control nodes and control edges\n", + "# ribasim_param.add_discrete_control_nodes(ribasim_model)\n", "\n" ] } @@ -646,7 +563,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json b/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json index 1e34cf2..47bfc90 100644 --- a/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json +++ b/src/peilbeheerst_model/Parametrize/sturing_AmstelGooienVecht.json @@ -1,136 +1,145 @@ { "Inlaat boezem, stuw": { - "upstream_level_offset": 0.10, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "block"], - "flow_rate_block": 0, - "flow_rate_pass": 2.0, - 
"node_type": "outlet" + "upstream_level_offset": 0.10, + "downstream_level_offset": 0, + "max_flow_rate": 1 }, "Uitlaat boezem, stuw": { - "upstream_level_offset": 0.00, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 2.0, - "node_type": "outlet" + "upstream_level_offset": 0, + "downstream_level_offset": 0.10, + "max_flow_rate": 1 }, "Reguliere stuw": { - "upstream_level_offset": 0.15, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 1.0, - "node_type": "outlet" + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 0.5 }, "Inlaat buitenwater peilgebied, stuw": { - "upstream_level_offset": 0.10, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["pass", "block", "pass", "block"], - "flow_rate_block": 0, - "flow_rate_pass": 1.0, - "node_type": "outlet" + "upstream_level_offset": 0.15, + "downstream_level_offset": 0, + "max_flow_rate": 1 }, "Uitlaat buitenwater peilgebied, stuw": { - "upstream_level_offset": 0.10, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 1.0, - "node_type": "outlet" + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 1 + }, + + "Uitlaat buitenwater boezem, stuw": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.30, + "max_flow_rate": 10 + }, + + "Inlaat buitenwater boezem, stuw": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0, + "max_flow_rate": 10 }, "Boezem boezem, stuw": { - "upstream_level_offset": 0.10, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 10.0, - "node_type": "outlet" + "upstream_level_offset": 0, + "downstream_level_offset": 0.10, + "max_flow_rate": 5 }, 
- "Inlaat boezem, gemaal": { - "upstream_level_offset": 0.10, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "block"], - "flow_rate_block": 0, - "flow_rate_pass": 0.20, - "node_type": "pump" + "Inlaat boezem, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.30, + "max_flow_rate": 0.2 }, - "Uitlaat boezem, gemaal": { - "upstream_level_offset": 0.00, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 0.20, - "node_type": "pump" + "Inlaat boezem, aanvoer gemaal": { + "upstream_level_offset": 0.20, + "downstream_level_offset": 0, + "max_flow_rate": 0.1 + }, + + "Uitlaat boezem, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.20, + "max_flow_rate": 0.2 + }, + + "Uitlaat boezem, aanvoer gemaal": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0, + "max_flow_rate": 0.1 }, "Regulier afvoer gemaal": { - "upstream_level_offset": 0.00, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 0.20, - "node_type": "pump" + "upstream_level_offset": 0, + "downstream_level_offset": 0.20, + "max_flow_rate": 0.1 }, "Regulier aanvoer gemaal": { - "upstream_level_offset": 0.15, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["pass", "block", "pass", "block"], - "flow_rate_block": 0, - "flow_rate_pass": 0.10, - "node_type": "pump" + "upstream_level_offset": 0.15, + "downstream_level_offset": 0, + "max_flow_rate": 0.1 }, "Uitlaat buitenwater peilgebied, afvoer gemaal": { - "upstream_level_offset": 0.15, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 0.2, - "node_type": "pump" + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 0.2 }, "Uitlaat buitenwater 
peilgebied, aanvoer gemaal": { - "upstream_level_offset": 0.15, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["pass", "block", "pass", "block"], - "flow_rate_block": 0, - "flow_rate_pass": 0.1, - "node_type": "pump" + "upstream_level_offset": 0.15, + "downstream_level_offset": 0.0500, + "max_flow_rate": 0.1 }, "Inlaat buitenwater peilgebied, afvoer gemaal": { - "upstream_level_offset": 0.00, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 0.2, - "node_type": "pump" + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 0.2 }, "Inlaat buitenwater peilgebied, aanvoer gemaal": { - "upstream_level_offset": 0.15, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["pass", "block", "pass", "block"], - "flow_rate_block": 0, - "flow_rate_pass": 0.1, - "node_type": "pump" + "upstream_level_offset": 0.15, + "downstream_level_offset": 0.0500, + "max_flow_rate": 0.1 + }, + + "Inlaat buitenwater boezem, afvoer gemaal": { + "upstream_level_offset": 0.0500, + "downstream_level_offset": 0.30, + "max_flow_rate": 10 + }, + + "Inlaat buitenwater boezem, aanvoer gemaal": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0.0500, + "max_flow_rate": 5 + }, + + "Uitlaat buitenwater boezem, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.30, + "max_flow_rate": 5 + }, + + "Uitlaat buitenwater boezem, aanvoer gemaal": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0.05, + "max_flow_rate": 5 }, "Boezem boezem, afvoer gemaal": { - "upstream_level_offset": 0.00, - "truth_state": ["FF", "FT", "TF", "TT"], - "control_state": ["block", "block", "pass", "pass"], - "flow_rate_block": 0, - "flow_rate_pass": 10.0, - "node_type": "pump" + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 10 + }, + + "Boezem boezem, aanvoer gemaal": { + "upstream_level_offset": 
0.15, + "downstream_level_offset": 0, + "max_flow_rate": 5 } } diff --git a/src/peilbeheerst_model/Parametrize/sturing_Scheldestromen.json b/src/peilbeheerst_model/Parametrize/sturing_Scheldestromen.json new file mode 100644 index 0000000..47bfc90 --- /dev/null +++ b/src/peilbeheerst_model/Parametrize/sturing_Scheldestromen.json @@ -0,0 +1,145 @@ +{ + "Inlaat boezem, stuw": { + "upstream_level_offset": 0.10, + "downstream_level_offset": 0, + "max_flow_rate": 1 + }, + + "Uitlaat boezem, stuw": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.10, + "max_flow_rate": 1 + }, + + "Reguliere stuw": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 0.5 + }, + + "Inlaat buitenwater peilgebied, stuw": { + "upstream_level_offset": 0.15, + "downstream_level_offset": 0, + "max_flow_rate": 1 + }, + + "Uitlaat buitenwater peilgebied, stuw": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 1 + }, + + "Uitlaat buitenwater boezem, stuw": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.30, + "max_flow_rate": 10 + }, + + "Inlaat buitenwater boezem, stuw": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0, + "max_flow_rate": 10 + }, + + "Boezem boezem, stuw": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.10, + "max_flow_rate": 5 + }, + + "Inlaat boezem, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.30, + "max_flow_rate": 0.2 + }, + + "Inlaat boezem, aanvoer gemaal": { + "upstream_level_offset": 0.20, + "downstream_level_offset": 0, + "max_flow_rate": 0.1 + }, + + "Uitlaat boezem, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.20, + "max_flow_rate": 0.2 + }, + + "Uitlaat boezem, aanvoer gemaal": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0, + "max_flow_rate": 0.1 + }, + + "Regulier afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.20, + 
"max_flow_rate": 0.1 + }, + + "Regulier aanvoer gemaal": { + "upstream_level_offset": 0.15, + "downstream_level_offset": 0, + "max_flow_rate": 0.1 + }, + + "Uitlaat buitenwater peilgebied, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 0.2 + }, + + "Uitlaat buitenwater peilgebied, aanvoer gemaal": { + "upstream_level_offset": 0.15, + "downstream_level_offset": 0.0500, + "max_flow_rate": 0.1 + }, + + "Inlaat buitenwater peilgebied, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 0.2 + }, + + "Inlaat buitenwater peilgebied, aanvoer gemaal": { + "upstream_level_offset": 0.15, + "downstream_level_offset": 0.0500, + "max_flow_rate": 0.1 + }, + + "Inlaat buitenwater boezem, afvoer gemaal": { + "upstream_level_offset": 0.0500, + "downstream_level_offset": 0.30, + "max_flow_rate": 10 + }, + + "Inlaat buitenwater boezem, aanvoer gemaal": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0.0500, + "max_flow_rate": 5 + }, + + "Uitlaat buitenwater boezem, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.30, + "max_flow_rate": 5 + }, + + "Uitlaat buitenwater boezem, aanvoer gemaal": { + "upstream_level_offset": 0.30, + "downstream_level_offset": 0.05, + "max_flow_rate": 5 + }, + + "Boezem boezem, afvoer gemaal": { + "upstream_level_offset": 0, + "downstream_level_offset": 0.15, + "max_flow_rate": 10 + }, + + "Boezem boezem, aanvoer gemaal": { + "upstream_level_offset": 0.15, + "downstream_level_offset": 0, + "max_flow_rate": 5 + } +} diff --git a/src/peilbeheerst_model/peilbeheerst_model/add_storage_basins.py b/src/peilbeheerst_model/peilbeheerst_model/add_storage_basins.py index 5c7cf43..971ee70 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/add_storage_basins.py +++ b/src/peilbeheerst_model/peilbeheerst_model/add_storage_basins.py @@ -1,285 +1,181 @@ -import logging -from pathlib import Path - import pandas as pd 
-from ribasim import Model -from shapely.geometry import Point - - -class NoGeometryFilter(logging.Filter): - def filter(self, record): - return not record.getMessage().startswith("[") - - -class AddStorageBasin: - def __init__(self, ribasim_toml, model_name, output_folder, include_hoofdwater=False, log=True, node_ids=None): - """ - Initialize the AddStorageBasin class. - - :param ribasim_toml: Path to the ribasim TOML file - :param model_name: Name of the model - :param output_folder: Folder to output the results - :param include_hoofdwater: Boolean flag to include hoofdwater in processing - :param log: Boolean flag to enable logging - :param node_ids: List of node IDs to process, if specified - """ - # Parse input - self.ribasim_toml = ribasim_toml - self.model_name = model_name - self.output_folder = output_folder - self.include_hoofdwater = include_hoofdwater - self.log = log - self.node_ids = node_ids - # Load model - self.model = self.load_ribasim_model(ribasim_toml) - # Set logging - if self.log is True: - self.log_filename = Path(output_folder) / f"{model_name}.log" - self.setup_logging() - - def setup_logging(self): - """Set up logging to file and console.""" - # Clear any existing handlers - for handler in logging.root.handlers[:]: - logging.root.removeHandler(handler) - - # Setup logging to file - logging.basicConfig( - filename=self.log_filename, - level=logging.DEBUG, - format="%(asctime)s - %(levelname)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", +from shapely.geometry import LineString + + +class AddStorageBasins: + def __init__(self, ribasim_model, exclude_hoofdwater, additional_basins_to_exclude, distance_bergende_basin=10): + self.ribasim_model = ribasim_model + self.exclude_hoofdwater = exclude_hoofdwater + self.additional_basins_to_exclude = additional_basins_to_exclude + self.distance_bergende_basin = distance_bergende_basin + + # duplicate the doorgaande basins: + # retrieve the ids of the doorgaande basins, exlude the basins_to_exclude + 
# copy the .node, .static, .state, .profile table + + def create_bergende_basins(self): + doorgaande_basin_ids = self.ribasim_model.basin.state.df.copy() + + # exclude (possibly) hoofdwater basins, as the majority of the area is meant for doorgaand water + if self.exclude_hoofdwater: + doorgaande_basin_ids = doorgaande_basin_ids.loc[doorgaande_basin_ids.meta_categorie == "doorgaand"] + + # exclude (possibly) other basins to create a storage basin + if self.additional_basins_to_exclude is not None and len(self.additional_basins_to_exclude) > 0: + doorgaande_basin_ids = doorgaande_basin_ids.loc[ + ~doorgaande_basin_ids.node_id.isin(self.additional_basins_to_exclude) + ] + + # convert to numbers + doorgaande_basin_ids = doorgaande_basin_ids.node_id.to_numpy() + + # retrieve the max node_id + max_node_id = self.get_current_max_nodeid() + + # duplicate all the tables + bergende_node = self.ribasim_model.basin.node.df.loc[ + self.ribasim_model.basin.node.df.index.isin(doorgaande_basin_ids) + ].copy() + bergende_static = self.ribasim_model.basin.static.df.loc[ + self.ribasim_model.basin.static.df.node_id.isin(doorgaande_basin_ids) + ].copy() + bergende_state = self.ribasim_model.basin.state.df.loc[ + self.ribasim_model.basin.state.df.node_id.isin(doorgaande_basin_ids) + ].copy() + bergende_profile = self.ribasim_model.basin.profile.df.loc[ + self.ribasim_model.basin.profile.df.node_id.isin(doorgaande_basin_ids) + ].copy() + bergende_area = self.ribasim_model.basin.area.df.loc[ + self.ribasim_model.basin.area.df.node_id.isin(doorgaande_basin_ids) + ].copy() + + # store the linked node_id of the bergende basin, and add the found max node_id. 
Plus one as we need to start counting from the next node_id + bergende_node["doorgaand_id"] = bergende_node.index.copy() + bergende_node.index = ( + max_node_id + bergende_node.index + ) # + 1 dont add the plus one here, as the index already starts at 1 + bergende_static.node_id = max_node_id + bergende_static.node_id # + 1 + bergende_state.node_id = max_node_id + bergende_state.node_id # + 1 + bergende_profile.node_id = max_node_id + bergende_profile.node_id # + 1 + bergende_area.node_id = max_node_id + bergende_area.node_id # + 1 + + # change the meta_categorie and column names from doorgaand to bergend + bergende_state.meta_categorie = "bergend" + + # add the geometry information for the bergende basin and the manning_resistance + bergende_node = bergende_node.rename( + columns={"geometry": "geometry_doorgaand"} + ) # store the geometry of the doorgaande basin + bergende_node["geometry_bergend"] = bergende_node["geometry_doorgaand"].translate( + xoff=self.distance_bergende_basin, yoff=0 + ) # create a bergende basin x meters to the right + bergende_node["geometry_manning"] = bergende_node["geometry_doorgaand"].translate( + xoff=self.distance_bergende_basin / 2, yoff=0 + ) # create a bergende manning resistance in the middle (halfway, thus divided by 2) + bergende_node["manning_id"] = ( + bergende_node.index.max() + bergende_node.index + 1 + ) # retrieve new max node id for the manning node + + # create edges from the nodes + def create_linestring(row, from_col, to_col): + return LineString([row[from_col], row[to_col]]) + + bergende_node["geometry_edge_bergend_to_MR"] = bergende_node.apply( + create_linestring, axis=1, from_col="geometry_bergend", to_col="geometry_manning" + ) + bergende_node["geometry_edge_MR_to_doorgaand"] = bergende_node.apply( + create_linestring, axis=1, from_col="geometry_manning", to_col="geometry_doorgaand" ) - # # Add console handler - # console_handler = logging.StreamHandler() - # console_handler.setLevel(logging.DEBUG) - # 
console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')) - - # # Add custom filter to console handler - # console_handler.addFilter(NoGeometryFilter()) - - # logging.getLogger().addHandler(console_handler) + # create the manning resistance node table, conform Ribasim style + manning_node = bergende_node[["manning_id", "geometry_manning"]].copy().reset_index(drop=True) + manning_node = manning_node.rename(columns={"manning_id": "node_id", "geometry_manning": "geometry"}) + manning_node = manning_node.set_index("node_id") + manning_node["node_type"] = "ManningResistance" + manning_node["meta_node_id"] = manning_node.index + + # create the manning resistance static table, conform Ribasim style + manning_static = manning_node.reset_index()[["node_id"]].copy() + manning_static["length"] = 1000 + manning_static["manning_n"] = 0.02 + manning_static["profile_width"] = 2.0 + manning_static["profile_slope"] = 3.0 + + # create the edges table which goes from the bergende basin to the ManningResistance (MR) + edge_bergend_MR = pd.DataFrame() + edge_bergend_MR["from_node_id"] = ( + bergende_node.index.copy() + ) # the index is the bergende node_id, which is the starting point + edge_bergend_MR["to_node_id"] = bergende_node.manning_id.to_numpy() # it goes to the manning node + edge_bergend_MR["geometry"] = ( + bergende_node.geometry_edge_bergend_to_MR.values + ) # edge geometry was already created + edge_bergend_MR["edge_type"] = "flow" + edge_bergend_MR["meta_from_node_type"] = "Basin" # include metadata + edge_bergend_MR["meta_to_node_type"] = "ManningResistance" # include metadata + edge_bergend_MR["meta_categorie"] = "bergend" # include metadata + + # repeat the same, but then from the ManningResistance (MR) to the doorgaande + edge_MR_doorgaand = pd.DataFrame() + edge_MR_doorgaand["from_node_id"] = ( + bergende_node.manning_id.to_numpy() + ) # the starting point is the ManningResistance node + edge_MR_doorgaand["to_node_id"] = 
bergende_node.doorgaand_id.to_numpy() # it goes to the doorgaande basin + edge_MR_doorgaand["geometry"] = ( + bergende_node.geometry_edge_MR_to_doorgaand.values + ) # edge geometry was already created + edge_MR_doorgaand["edge_type"] = "flow" + edge_MR_doorgaand["meta_from_node_type"] = "ManningResistance" # include metadata + edge_MR_doorgaand["meta_to_node_type"] = "Basin" # include metadata + edge_MR_doorgaand["meta_categorie"] = "bergend" # include metadata. This is still bergend. + + # combine the edge tables + edge_bergend_all = pd.concat([edge_bergend_MR, edge_MR_doorgaand]).reset_index(drop=True) + edge_bergend_all["edge_id"] = ( + edge_bergend_all.index.copy() + self.ribasim_model.edge.df.index.max() + 1 + ) # start counting from the highest edge_id + edge_bergend_all = edge_bergend_all.set_index("edge_id") + + # clean the new node table, update the meta_node_id column + bergende_node = bergende_node[["node_type", "meta_node_id", "geometry_bergend"]] + bergende_node = bergende_node.rename(columns={"geometry_bergend": "geometry"}) + bergende_node["meta_node_id"] = bergende_node.index.copy() + + # concat all the new tables to the existing model + self.ribasim_model.basin.node.df = pd.concat([self.ribasim_model.basin.node.df, bergende_node]) + self.ribasim_model.basin.static = pd.concat([self.ribasim_model.basin.static.df, bergende_static]).reset_index( + drop=True + ) + self.ribasim_model.basin.state = pd.concat([self.ribasim_model.basin.state.df, bergende_state]).reset_index( + drop=True + ) + self.ribasim_model.basin.profile = pd.concat( + [self.ribasim_model.basin.profile.df, bergende_profile] + ).reset_index(drop=True) + self.ribasim_model.basin.area = pd.concat([self.ribasim_model.basin.area.df, bergende_area]).reset_index( + drop=True + ) - def load_ribasim_model(self, ribasim_toml): - """ - Load the ribasim model from the TOML file. 
+ self.ribasim_model.manning_resistance.node.df = pd.concat( + [self.ribasim_model.manning_resistance.node.df, manning_node] + ) + self.ribasim_model.manning_resistance.static = pd.concat( + [self.ribasim_model.manning_resistance.static.df, manning_static] + ).reset_index(drop=True) - :param ribasim_toml: Path to the ribasim TOML file - :return: Loaded ribasim model - """ - model = Model(filepath=ribasim_toml) - return model + self.ribasim_model.edge.df = pd.concat([self.ribasim_model.edge.df, edge_bergend_all]) def get_current_max_nodeid(self): - """ - Get the current maximum node ID from the model. - - :return: Maximum node ID - """ + """Get the current maximum node ID from the model where node_id is stored as an index.""" max_ids = [] - for k, v in self.model.__dict__.items(): - if hasattr(v, "node") and "node_id" in v.node.df.columns.tolist(): - mid = v.node.df.node_id.max() - if not pd.isna(mid): - max_ids.append(int(mid)) + for k, v in self.ribasim_model.__dict__.items(): + if hasattr(v, "node"): + # Check if the DataFrame's index is named 'meta_node_id' + if v.node.df.index.name == "node_id": + mid = v.node.df.index.max() + if not pd.isna(mid): + max_ids.append(int(mid)) if len(max_ids) == 0: raise ValueError("No node ids found") max_id = max(max_ids) return max_id - - def add_basin_nodes_with_manning_resistance(self): - """Add basin nodes with Manning resistance based on meta_categorie.""" - # Get the meta_categorie column from the state DataFrame - state_df = self.model.basin.state.df - - for index, row in self.model.basin.node.df.iterrows(): - node_id = row["node_id"] - - # If node_ids is specified, only process those nodes - if self.node_ids is not None and node_id not in self.node_ids: - continue - - # Retrieve the corresponding meta_categorie for the current node - meta_categorie = state_df.loc[state_df["node_id"] == node_id, "meta_categorie"].to_numpy() - - # If meta_categorie is empty, continue to the next row - if len(meta_categorie) == 0: - 
continue - - meta_categorie = meta_categorie[0] # Get the actual value - - if self.include_hoofdwater: - if "bergend" in meta_categorie or ( - "hoofdwater" not in meta_categorie and "doorgaand" not in meta_categorie - ): - continue - else: - if "bergend" in meta_categorie or "hoofdwater" in meta_categorie or "doorgaand" not in meta_categorie: - continue - - original_node_id = row["node_id"] - original_geometry = row["geometry"] - logging.info(f"Processing Basin Node ID: {original_node_id}") - - # Calculate new geometries - manning_geometry = Point(original_geometry.x + 5, original_geometry.y) - new_basin_geometry = Point(original_geometry.x + 10, original_geometry.y) - - # Add manning resistance node - manning_node_id = self.add_manning_resistance_node(manning_geometry) - if manning_node_id is not None: - # Add new basin node and connect to manning resistance node - new_basin_node_id = self.add_new_basin_node(new_basin_geometry) - if new_basin_node_id is not None: - self.connect_nodes(new_basin_node_id, manning_node_id, original_node_id) - else: - logging.error(f"Failed to add new basin node for Manning Resistance Node ID: {manning_node_id}") - else: - logging.error(f"Failed to add Manning Resistance node for Basin Node ID: {original_node_id}") - - def add_new_basin_node(self, geometry): - """ - Add a new basin node at the specified geometry. 
- - :param geometry: Geometry of the new basin node - :return: ID of the new basin node, or None if adding failed - """ - try: - max_id = self.get_current_max_nodeid() - new_node_id = max_id + 1 - key = "basin" - value = getattr(self.model, key, None) - - if value is not None: - original_geometry = None - if hasattr(value, "__dict__"): - # Retrieve the original geometry (MultiPolygon) from the first row of the basin area DataFrame - if "area" in value.__dict__ and hasattr(value.area, "df") and not value.area.df.empty: - original_geometry = value.area.df.iloc[0]["geometry"] - - for sub_key, sub_value in value.__dict__.items(): - if sub_key == "time" or sub_key == "subgrid": - continue - else: - sub_value = getattr(value, sub_key, None) - if sub_value is None or not hasattr(sub_value, "df") or sub_value.df is None: - logging.warning(f"Sub value for key '{sub_key}' is None or has no DataFrame") - continue - df_value = sub_value.df.copy() - last_row = df_value.iloc[-1].copy() - last_row["node_id"] = new_node_id - if "geometry" in last_row: - # Determine the geometry type based on the table type - if sub_key == "node": - last_row["geometry"] = geometry - elif sub_key == "area": - last_row["geometry"] = ( - original_geometry if original_geometry is not None else geometry - ) - for col in last_row.index: - if col.startswith("meta_cat"): - last_row[col] = "bergend" - new_row_df = pd.DataFrame([last_row]) - df_value = pd.concat([df_value, new_row_df], ignore_index=True) - sub_value.df = df_value - - logging.info(f"Successfully added new basin node with Node ID: {new_node_id}") - return new_node_id - else: - logging.error(f"Could not find value for key '{key}'") - return None - - except Exception as e: - logging.error(f"An error occurred while adding new basin node: {e}") - return None - - def add_manning_resistance_node(self, geometry): - """ - Add a Manning resistance node at the specified geometry. 
- - :param geometry: Geometry of the Manning resistance node - :return: ID of the Manning resistance node, or None if adding failed - """ - try: - max_id = self.get_current_max_nodeid() - manning_node_id = max_id + 1 - key = "manning_resistance" - value = getattr(self.model, key, None) - - if value is not None: - if hasattr(value, "__dict__"): - for sub_key, sub_value in value.__dict__.items(): - if sub_key == "time" or sub_key == "subgrid": - continue - else: - sub_value = getattr(value, sub_key, None) - if sub_value is None or not hasattr(sub_value, "df") or sub_value.df is None: - logging.warning(f"Sub value for key '{sub_key}' is None or has no DataFrame") - continue - df_value = sub_value.df.copy() - last_row = df_value.iloc[-1].copy() - last_row["node_id"] = manning_node_id - if "geometry" in last_row: - last_row["geometry"] = geometry - for col in last_row.index: - if col.startswith("meta_categ"): - last_row[col] = "bergend" - new_row_df = pd.DataFrame([last_row]) - df_value = pd.concat([df_value, new_row_df], ignore_index=True) - sub_value.df = df_value - - logging.info(f"Successfully added Manning Resistance node with Node ID: {manning_node_id}") - return manning_node_id - else: - logging.error(f"Could not find value for key '{key}'") - return None - - except Exception as e: - logging.error(f"Error adding Manning Resistance node: {e}") - return None - - def connect_nodes(self, new_basin_node_id, manning_node_id, original_node_id): - """ - Connect the new basin node to the original basin node via the Manning resistance node. 
- - :param new_basin_node_id: ID of the new basin node - :param manning_node_id: ID of the Manning resistance node - :param original_node_id: ID of the original basin node - """ - try: - self.model.edge.add(self.model.basin[new_basin_node_id], self.model.manning_resistance[manning_node_id]) - self.model.edge.add(self.model.manning_resistance[manning_node_id], self.model.basin[original_node_id]) - logging.info( - f"Connected new Basin Node ID: {new_basin_node_id} to original Basin Node ID: {original_node_id} via Manning Resistance Node ID: {manning_node_id}" - ) - except Exception as e: - logging.error(f"Error connecting nodes: {e}") - - def run(self): - """Run the process of adding basin nodes with Manning resistance and writing the updated model""" - self.add_basin_nodes_with_manning_resistance() - # self.write_ribasim_model() - logging.shutdown() - - return self.model - - def write_ribasim_model(self): - """Write the updated ribasim model to the output directory""" - outputdir = Path(self.output_folder) - modelcase_dir = Path(f"updated_{self.model_name.lower()}") - - full_path = outputdir / modelcase_dir - full_path.mkdir(parents=True, exist_ok=True) - - self.model.write(full_path / "ribasim.toml") - - -# Example usage -# ribasim_toml = r"C:\Users\Aerts\Desktop\RIBASIM Project\Verwerken_Feedback\modellen\AmstelGooienVecht_boezemmodel_2024_6_8\ribasim.toml" -# output_folder = r"C:\Users\Aerts\Desktop\RIBASIM Project\Verwerken_Feedback\verwerkte_modellen" -# model_name = 'test_hoofdwater' -# node_ids = [1, 2, 3] # Specify node IDs to process - -# processor = AddStorageBasin(ribasim_toml, model_name, output_folder, include_hoofdwater=True, log=True, node_ids=node_ids) -# processor.run() diff --git a/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py b/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py index 6510a8c..3081c62 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py +++ 
b/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py @@ -236,7 +236,7 @@ def assign_node_ids(self, crossings): numbers_df = pd.concat([numbers_from["peilgebied_from"], numbers_to["peilgebied_to"]]) numbers_df = numbers_df.unique() - if numbers_df[0] is not None: # detect the largest number + if numbers_df[0] is not None and not np.isnan(numbers_df[0]): # detect the largest number max_number = max(numbers_df.astype(int)) if max_number < 0: max_number = 0 @@ -881,13 +881,13 @@ def edge(self): from_node_type = self.edges.merge(self.nodes, left_on="from", right_on="node_id")["type"].to_numpy() edge["from_node_id"] = self.edges["from"] # from node ids - edge["from_node_type"] = from_node_type + edge["meta_from_node_type"] = from_node_type # fix the to nodes to_node_type = self.edges.merge(self.nodes, left_on="to", right_on="node_id")["type"].to_numpy() edge["to_node_id"] = self.edges["to"] # to node ids - edge["to_node_type"] = to_node_type + edge["meta_to_node_type"] = to_node_type # fill in the other columns edge["edge_type"] = "flow" @@ -895,6 +895,11 @@ def edge(self): edge["subnetwork_id"] = None edge["geometry"] = self.edges["line_geom"] + # comply to Ribasim 2024.11 + edge = edge.reset_index(drop=True) + edge["edge_id"] = edge.index.astype(int) + edge = edge.set_index("edge_id") + return edge def basin(self): @@ -949,6 +954,12 @@ def basin(self): basin_area["geometry"] = basin_area["basins_area_geom"] basin_area["meta_streefpeil"] = basin_area["streefpeil"] basin_area = basin_area[["node_id", "meta_streefpeil", "geometry"]] + basin_area = gpd.GeoDataFrame(basin_area, geometry="geometry").to_crs(crs="EPSG:28992") + + # comply to Ribasim 2024.11 + basin_node["meta_node_id"] = basin_node["node_id"].copy().astype(int) + basin_area["meta_node_id"] = basin_area["node_id"].copy().astype(int) + basin_node = basin_node.set_index("node_id") return basin_node, basin_profile, basin_static, basin_state, basin_area @@ -994,6 +1005,10 @@ def 
tabulated_rating_curve(self): rating_curve_static["control_state"] = np.nan # rating_curve_static = rating_curve_static.reset_index(drop=True) + # comply to Ribasim 2024.11 + rating_curve_node["meta_node_id"] = rating_curve_node["node_id"].copy().astype(int) + rating_curve_node = rating_curve_node.set_index("node_id") + return rating_curve_node, rating_curve_static def pump(self): @@ -1022,6 +1037,10 @@ def pump(self): pump_static["max_flow_rate"] = np.nan pump_static["control_state"] = np.nan + # comply to Ribasim 2024.11 + pump_node["meta_node_id"] = pump_node["node_id"].copy().astype(int) + pump_node = pump_node.set_index("node_id") + return pump_node, pump_static def level_boundary(self): @@ -1049,6 +1068,10 @@ def level_boundary(self): level_boundary_static["active"] = np.nan level_boundary_static["level"] = 0 + # comply to Ribasim 2024.11 + level_boundary_node["meta_node_id"] = level_boundary_node["node_id"].copy().astype(int) + level_boundary_node = level_boundary_node.set_index("node_id") + return level_boundary_node, level_boundary_static def flow_boundary(self): @@ -1076,6 +1099,10 @@ def flow_boundary(self): flow_boundary_static["active"] = np.nan flow_boundary_static["flow_rate"] = 0 + # comply to Ribasim 2024.11 + flow_boundary_node["meta_node_id"] = flow_boundary_node["node_id"].copy().astype(int) + flow_boundary_node = flow_boundary_node.set_index("node_id") + return flow_boundary_node, flow_boundary_static def linear_resistance(self): @@ -1116,6 +1143,10 @@ def manning_resistance(self): manning_resistance_static["profile_width"] = 2 manning_resistance_static["profile_slope"] = 3 + # comply to Ribasim 2024.11 + manning_resistance_node["meta_node_id"] = manning_resistance_node["node_id"].copy().astype(int) + manning_resistance_node = manning_resistance_node.set_index("node_id") + return manning_resistance_node, manning_resistance_static def fractional_flow(self): @@ -1148,6 +1179,10 @@ def terminal(self): terminal_node["subnetwork_id"] = np.nan 
terminal_node["geometry"] = terminal_nodes["geometry"] + # comply to Ribasim 2024.11 + terminal_node["meta_node_id"] = terminal_node["node_id"].copy().astype(int) + terminal_node = terminal_node.set_index("node_id") + return terminal_node def outlet(self, model): @@ -1417,7 +1452,7 @@ def check(self, model, post_processed_data, crossings): basin_nodes = ( model.basin.state.df.copy() ) # .loc[model.basin.state.df['node_type'] == 'Basin'] #select all basins - model.basin.node.df.index += 1 + # model.basin.node.df.index += 1 #RB: outcommented plus one basin_nodes["geometry"] = model.basin.node.df.geometry # add geometry column basin_nodes = gpd.GeoDataFrame(basin_nodes, geometry="geometry") # convert from pd go gpd @@ -1439,11 +1474,12 @@ def check(self, model, post_processed_data, crossings): # inlaten_TRC inlaten_TRC = nodes_from_boezem.loc[ - (nodes_from_boezem.to_node_type == "TabulatedRatingCurve") | (nodes_from_boezem.to_node_type == "Outlet") + (nodes_from_boezem.meta_to_node_type == "TabulatedRatingCurve") + | (nodes_from_boezem.meta_to_node_type == "Outlet") ] inlaten_TRC = inlaten_TRC["to_node_id"] inlaten_TRC = model.tabulated_rating_curve.node.df.loc[ - model.tabulated_rating_curve.node.df.node_id.isin(inlaten_TRC) + model.tabulated_rating_curve.node.df.index.isin(inlaten_TRC) # df.node_id --> df.index ] # add the outlets if this code is already ran before @@ -1454,32 +1490,37 @@ def check(self, model, post_processed_data, crossings): inlaten_TRC["meta_type_verbinding"] = "Inlaat" # inlaten_gemalen - inlaten_gemalen = nodes_from_boezem.loc[nodes_from_boezem.to_node_type == "Pump"] + inlaten_gemalen = nodes_from_boezem.loc[nodes_from_boezem.meta_to_node_type == "Pump"] inlaten_gemalen = inlaten_gemalen["to_node_id"] - inlaten_gemalen = model.pump.node.df.loc[model.pump.node.df.node_id.isin(inlaten_gemalen)] + inlaten_gemalen = model.pump.node.df.loc[ + model.pump.node.df.index.isin(inlaten_gemalen) + ] # df.node_id --> df.index 
inlaten_gemalen["meta_type_verbinding"] = "Inlaat" # inlaten_flowboundary - inlaten_flowboundary = nodes_to_boezem.loc[nodes_to_boezem.from_node_type == "FlowBoundary"] + inlaten_flowboundary = nodes_to_boezem.loc[nodes_to_boezem.meta_from_node_type == "FlowBoundary"] inlaten_flowboundary = inlaten_flowboundary["from_node_id"] inlaten_flowboundary = model.flow_boundary.node.df.loc[ - model.flow_boundary.node.df.node_id.isin(inlaten_flowboundary) + model.flow_boundary.node.df.index.isin(inlaten_flowboundary) # df.node_id --> df.index ] inlaten_flowboundary["meta_type_verbinding"] = "Inlaat boundary" # uitlaten_TRC uitlaten_TRC = nodes_to_boezem.loc[ - (nodes_to_boezem.from_node_type == "TabulatedRatingCurve") | (nodes_to_boezem.from_node_type == "Outlet") + (nodes_to_boezem.meta_from_node_type == "TabulatedRatingCurve") + | (nodes_to_boezem.meta_from_node_type == "Outlet") ] uitlaten_TRC = uitlaten_TRC["from_node_id"] uitlaten_TRC = model.tabulated_rating_curve.node.df.loc[ - model.tabulated_rating_curve.node.df.node_id.isin(uitlaten_TRC) + model.tabulated_rating_curve.node.df.index.isin(uitlaten_TRC) # df.node_id --> df.index ] # uitlaten_gemalen - uitlaten_gemalen = nodes_to_boezem.loc[nodes_to_boezem.from_node_type == "Pump"] + uitlaten_gemalen = nodes_to_boezem.loc[nodes_to_boezem.meta_from_node_type == "Pump"] uitlaten_gemalen = uitlaten_gemalen["from_node_id"] - uitlaten_gemalen = model.pump.node.df.loc[model.pump.node.df.node_id.isin(uitlaten_gemalen)] + uitlaten_gemalen = model.pump.node.df.loc[ + model.pump.node.df.index.isin(uitlaten_gemalen) + ] # df.node_id --> df.index uitlaten_gemalen["meta_type_verbinding"] = "Uitlaat" # add the outlets if this code is already ran before @@ -1490,10 +1531,10 @@ def check(self, model, post_processed_data, crossings): uitlaten_TRC["meta_type_verbinding"] = "Uitlaat" # uitlaten_flowboundary - uitlaten_flowboundary = nodes_to_boezem.loc[nodes_to_boezem.from_node_type == "FlowBoundary"] + uitlaten_flowboundary = 
nodes_to_boezem.loc[nodes_to_boezem.meta_from_node_type == "FlowBoundary"] uitlaten_flowboundary = uitlaten_flowboundary["from_node_id"] uitlaten_flowboundary = model.flow_boundary.node.df.loc[ - model.flow_boundary.node.df.node_id.isin(uitlaten_flowboundary) + model.flow_boundary.node.df.index.isin(uitlaten_flowboundary) # df.node_id --> df.index ] uitlaten_flowboundary["meta_type_verbinding"] = "Inlaat boundary" @@ -1522,12 +1563,12 @@ def check(self, model, post_processed_data, crossings): # we first need to identify the connecting nodes, such as TRC and pumps, which originate from the boundaries # after that has been done, these nodes should be filtered based on whether they are connected with the nodes_from/to_boezem # BCN = Boundary Connection Nodes - condition_BCN_to_pump = nodes_from_boezem.to_node_type == "Pump" - condition_BCN_to_TRC = nodes_from_boezem.to_node_type == "TabulatedRatingCurve" - condition_BCN_to_outlet = nodes_from_boezem.to_node_type == "Outlet" - condition_BCN_from_pump = nodes_to_boezem.from_node_type == "Pump" - condition_BCN_from_TRC = nodes_to_boezem.from_node_type == "TabulatedRatingCurve" - condition_BCN_from_outlet = nodes_to_boezem.from_node_type == "Outlet" + condition_BCN_to_pump = nodes_from_boezem.meta_to_node_type == "Pump" + condition_BCN_to_TRC = nodes_from_boezem.meta_to_node_type == "TabulatedRatingCurve" + condition_BCN_to_outlet = nodes_from_boezem.meta_to_node_type == "Outlet" + condition_BCN_from_pump = nodes_to_boezem.meta_from_node_type == "Pump" + condition_BCN_from_TRC = nodes_to_boezem.meta_from_node_type == "TabulatedRatingCurve" + condition_BCN_from_outlet = nodes_to_boezem.meta_from_node_type == "Outlet" BCN_from = nodes_from_boezem.loc[ condition_BCN_to_pump | condition_BCN_to_TRC | condition_BCN_to_outlet @@ -1542,19 +1583,23 @@ def check(self, model, post_processed_data, crossings): BCN_from = BCN_from.to_node_id # step 1 BCN_from = model.edge.df.loc[model.edge.df.from_node_id.isin(BCN_from)] # step 2 
BCN_from = BCN_from.loc[ - (BCN_from.to_node_type == "FlowBoundary") - | (BCN_from.to_node_type == "LevelBoundary") - | (BCN_from.to_node_type == "Terminal") + (BCN_from.meta_to_node_type == "FlowBoundary") + | (BCN_from.meta_to_node_type == "LevelBoundary") + | (BCN_from.meta_to_node_type == "Terminal") ] # look the node ids up in each table. BCN_from_TRC = model.tabulated_rating_curve.node.df.loc[ - model.tabulated_rating_curve.node.df.node_id.isin(BCN_from.from_node_id) + model.tabulated_rating_curve.node.df.index.isin(BCN_from.from_node_id) # df.node_id --> df.index ] - BCN_from_pump = model.pump.node.df.loc[model.pump.node.df.node_id.isin(BCN_from.from_node_id)] + BCN_from_pump = model.pump.node.df.loc[ + model.pump.node.df.index.isin(BCN_from.from_node_id) + ] # df.node_id --> df.index if model.outlet.node.df is not None: - BCN_from_outlet = model.outlet.node.df.loc[model.outlet.node.df.node_id.isin(BCN_from)] + BCN_from_outlet = model.outlet.node.df.loc[ + model.outlet.node.df.index.isin(BCN_from) + ] # df.node_id --> df.index BCN_from = pd.concat([BCN_from_TRC, BCN_from_pump, BCN_from_outlet]) else: BCN_from = pd.concat([BCN_from_TRC, BCN_from_pump]) @@ -1566,20 +1611,22 @@ def check(self, model, post_processed_data, crossings): BCN_to = BCN_to.from_node_id # step 1 BCN_to = model.edge.df.loc[model.edge.df.to_node_id.isin(BCN_to)] # step 2 BCN_to = BCN_to.loc[ - (BCN_to.from_node_type == "FlowBoundary") - | (BCN_to.from_node_type == "LevelBoundary") - | (BCN_to.from_node_type == "Terminal") + (BCN_to.meta_from_node_type == "FlowBoundary") + | (BCN_to.meta_from_node_type == "LevelBoundary") + | (BCN_to.meta_from_node_type == "Terminal") ] BCN_to["meta_type_verbinding"] = "Inlaat boundary" # look the node ids up in each table. 
BCN_to_TRC = model.tabulated_rating_curve.node.df.loc[ - model.tabulated_rating_curve.node.df.node_id.isin(BCN_to.to_node_id) + model.tabulated_rating_curve.node.df.index.isin(BCN_to.to_node_id) # df.node_id --> df.index ] - BCN_to_pump = model.pump.node.df.loc[model.pump.node.df.node_id.isin(BCN_to.to_node_id)] + BCN_to_pump = model.pump.node.df.loc[ + model.pump.node.df.index.isin(BCN_to.to_node_id) + ] # df.node_id --> df.index if model.outlet.node.df is not None: - BCN_to_outlet = model.outlet.node.df.loc[model.outlet.node.df.node_id.isin(BCN_to)] + BCN_to_outlet = model.outlet.node.df.loc[model.outlet.node.df.index.isin(BCN_to)] # df.node_id --> df.index BCN_to = pd.concat([BCN_to_TRC, BCN_to_pump, BCN_to_outlet]) else: BCN_to = pd.concat([BCN_to_TRC, BCN_to_pump]) @@ -1602,7 +1649,7 @@ def add_meta_data(self, model, checks, post_processed_data, crossings): basin_nodes = ( model.basin.state.df.copy() ) # .loc[model.basin.state.df['node_type'] == 'Basin'] #select all basins - model.basin.node.df.index += 1 + # model.basin.node.df.index += 1 basin_nodes["geometry"] = model.basin.node.df.geometry # add geometry column basin_nodes = gpd.GeoDataFrame(basin_nodes, geometry="geometry") # convert from pd go gpd @@ -1664,25 +1711,31 @@ def add_meta_data(self, model, checks, post_processed_data, crossings): # TabulatedRatingCurve model.tabulated_rating_curve.static.df = model.tabulated_rating_curve.static.df.merge( - checks["inlaten_uitlaten_boezems"][["node_id", "meta_type_verbinding"]], - left_on=["node_id"], - right_on=["node_id"], + checks["inlaten_uitlaten_boezems"][["meta_type_verbinding"]], + left_index=True, + right_index=True, + # left_on=["node_id"], + # right_on=["node_id"], how="left", ) # Pump model.pump.static.df = model.pump.static.df.merge( - checks["inlaten_uitlaten_boezems"][["node_id", "meta_type_verbinding"]], - left_on=["node_id"], - right_on=["node_id"], + checks["inlaten_uitlaten_boezems"][["meta_type_verbinding"]], + left_index=True, + 
right_index=True, + # left_on=["node_id"], + # right_on=["node_id"], how="left", ) # FlowBoundary model.flow_boundary.static.df = model.flow_boundary.static.df.merge( - checks["inlaten_uitlaten_boezems"][["node_id", "meta_type_verbinding"]], - left_on=["node_id"], - right_on=["node_id"], + checks["inlaten_uitlaten_boezems"][["meta_type_verbinding"]], + left_index=True, + right_index=True, + # left_on=["node_id"], + # right_on=["node_id"], how="left", ) diff --git a/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py b/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py index 826a1f4..71b1048 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py +++ b/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py @@ -30,13 +30,12 @@ def get_current_max_nodeid(ribasim_model): if len(df_all_nodes) == 0: max_id = 1 else: - max_id = int(df_all_nodes.node_id.max()) + max_id = int(df_all_nodes.meta_node_id.max()) return max_id def set_initial_basin_state(ribasim_model): - # display(ribasim_model.basin.state.df) if "meta_peilgebied_cat" in list(ribasim_model.basin.node.df.keys()): basin_state_df = ribasim_model.basin.node.df[["node_id", "meta_peilgebied_cat"]] basin_state_df["meta_categorie"] = basin_state_df["meta_peilgebied_cat"] @@ -53,7 +52,7 @@ def insert_standard_profile( ): profile = ribasim_model.basin.area.df.copy() profile.node_id, profile.meta_streefpeil = ( - profile.node_id.astype(int), + profile.meta_node_id.astype(int), profile.meta_streefpeil.astype(float), ) # convert to numbers @@ -120,17 +119,17 @@ def insert_standard_profile( profile_total = profile_total.sort_values(by=["node_id", "level"]).reset_index(drop=True) # insert the new tables in the model - ribasim_model.basin.profile.df = profile_total + ribasim_model.basin.profile = profile_total # due to the bergende basin, the surface area has been doubled. Correct this. 
ribasim_model.basin.profile.df.area /= 2 - # The newly created (storage) basins do not have a correct initial level yet. Fix this as well. - initial_level = ribasim_model.basin.profile.df.copy() - initial_level = initial_level.drop_duplicates(subset="node_id", keep="last") - ribasim_model.basin.state.df["level"] = ribasim_model.basin.state.df.merge(right=initial_level, on="node_id")[ - "level_y" - ] + # # The newly created (storage) basins do not have a correct initial level yet. Fix this as well. + # initial_level = ribasim_model.basin.profile.df.copy() + # initial_level = initial_level.drop_duplicates(subset="node_id", keep="last") + # ribasim_model.basin.state.df["level"] = ribasim_model.basin.state.df.merge(right=initial_level, on="node_id")[ + # "level_y" + # ] return @@ -216,12 +215,12 @@ def set_static_forcing(timesteps: int, timestep_size: str, start_time: str, forc # "urban_runoff": forcing_data_df['urban_runoff'] # }) for node_id in node_ids], ignore_index=True) - all_node_forcing_data = ribasim_model.basin.node.df[["node_id"]].copy() + all_node_forcing_data = ribasim_model.basin.node.df[["meta_node_id"]].copy() for col_name, col_value in forcing_dict.items(): all_node_forcing_data[col_name] = col_value # Update Model - ribasim_model.basin.static.df = all_node_forcing_data + ribasim_model.basin.static = all_node_forcing_data.reset_index() ribasim_model.starttime = time_range[0].to_pydatetime() ribasim_model.endtime = time_range[-1].to_pydatetime() @@ -236,12 +235,17 @@ def Terminals_to_LevelBoundaries(ribasim_model, default_level=0): # second, implement the LevelBoundary nodes and change the node_type nodes_Terminals.node_type = "LevelBoundary" + ribasim_model.level_boundary.node.df = pd.concat([ribasim_model.level_boundary.node.df, nodes_Terminals]) + ribasim_model.level_boundary.node.df = ribasim_model.level_boundary.node.df.sort_values(by="meta_node_id") # third, implement the LevelBoundary static + nodes_Terminals = nodes_Terminals.reset_index() 
LB_static = nodes_Terminals[["node_id"]] LB_static["level"] = default_level - ribasim_model.level_boundary.static.df = pd.concat([ribasim_model.level_boundary.static.df, LB_static]) + LB_combined = pd.concat([ribasim_model.level_boundary.static.df, LB_static]) + LB_combined = LB_combined.drop_duplicates(subset="node_id").sort_values(by="node_id").reset_index(drop=True) + ribasim_model.level_boundary.static = LB_combined # fourth, update the edges table. ribasim_model.edge.df.replace(to_replace="Terminal", value="LevelBoundary", inplace=True) @@ -264,22 +268,30 @@ def FlowBoundaries_to_LevelBoundaries(ribasim_model, default_level=0): nodes_TRC_FlowBoundary["node_type"] = "TabulatedRatingCurve" # supplement the TRC.node table - ribasim_model.tabulated_rating_curve.node.df = pd.concat( - [ribasim_model.tabulated_rating_curve.node.df, nodes_TRC_FlowBoundary] - ).reset_index(drop=True) + new_TRC_node = pd.concat([ribasim_model.tabulated_rating_curve.node.df, nodes_TRC_FlowBoundary]).reset_index( + drop=True + ) + new_TRC_node["node_id"] = new_TRC_node["meta_node_id"].copy() + new_TRC_node = new_TRC_node.set_index("node_id") + + ribasim_model.tabulated_rating_curve.node.df = new_TRC_node + + # ribasim_model.tabulated_rating_curve.node.df = pd.concat( + # [ribasim_model.tabulated_rating_curve.node.df, nodes_TRC_FlowBoundary] + # ).reset_index(drop=True) # also supplement the TRC.static table. 
Create dummy Q(h)-relations - TRC_LB1 = nodes_FlowBoundary[["node_id"]] + TRC_LB1 = nodes_FlowBoundary[["meta_node_id"]] TRC_LB1["level"] = 0 TRC_LB1["flow_rate"] = 0 - TRC_LB2 = nodes_FlowBoundary[["node_id"]] + TRC_LB2 = nodes_FlowBoundary[["meta_node_id"]] TRC_LB2["level"] = 1 TRC_LB2["flow_rate"] = 1 TRC_LB = pd.concat([TRC_LB1, TRC_LB2]) TRC_LB = TRC_LB.sort_values(by=["node_id", "level"]).reset_index(drop=True) - ribasim_model.tabulated_rating_curve.static.df = pd.concat( + ribasim_model.tabulated_rating_curve.static = pd.concat( [ribasim_model.tabulated_rating_curve.static.df, TRC_LB] ).reset_index(drop=True) @@ -293,33 +305,42 @@ def FlowBoundaries_to_LevelBoundaries(ribasim_model, default_level=0): # up till this point, all FlowBoundaries have been converted to TRC's. Now the actual LevelBoundaries needs to be created max_id = get_current_max_nodeid(ribasim_model) - nodes_FlowBoundary["node_id_old"] = nodes_FlowBoundary.node_id # store for later + + nodes_FlowBoundary["meta_old_node_id"] = nodes_FlowBoundary.meta_node_id # store for later nodes_FlowBoundary["node_id"] = max_id + nodes_FlowBoundary.index + 1 # implement new id's + # nodes_FlowBoundary["node_id"] = nodes_FlowBoundary.meta_node_id.copy() nodes_FlowBoundary["geometry"] = nodes_FlowBoundary.geometry.translate( xoff=-1, yoff=-1 ) # move the points 1 meter to the lower left (diagonally) nodes_FlowBoundary["node_type"] = "LevelBoundary" - nodes_FlowBoundary["level"] = default_level + + # set the new node_id; overrule the old + nodes_FlowBoundary = nodes_FlowBoundary.set_index("node_id", drop=True) + nodes_FlowBoundary = nodes_FlowBoundary[["node_type", "geometry", "meta_old_node_id"]] nodes_LevelBoundary = nodes_FlowBoundary.copy(deep=True) # for clarity # supplement the LB.node table - ribasim_model.level_boundary.node.df = pd.concat( - [ribasim_model.level_boundary.node.df, nodes_LevelBoundary[["node_id", "node_type", "geometry"]]] - ).reset_index(drop=True) + new_LB_node = pd.concat( + 
[ribasim_model.level_boundary.node.df, nodes_LevelBoundary[["node_type", "geometry", "meta_old_node_id"]]] + ) # .reset_index(drop=True) + new_LB_node["meta_node_id"] = new_LB_node.index.copy() + + ribasim_model.level_boundary.node.df = new_LB_node + # supplement the LB.static table - ribasim_model.level_boundary.static.df = pd.concat( - [ribasim_model.level_boundary.static.df, nodes_LevelBoundary[["node_id", "level"]]] - ).reset_index(drop=True) + # ribasim_model.level_boundary.static.df = pd.concat( + # [ribasim_model.level_boundary.static.df, nodes_LevelBoundary[["node_id", "level"]]] + # ).reset_index(drop=True) # the nodes have been created. Now add the edges edges_LB = pd.DataFrame() - edges_LB["from_node_id"] = nodes_LevelBoundary[ - "node_id" - ] # as these nodes were initially FlowBoundaries, they always flow into the model, not out. Thus, is always the starting point (=from_node_id) - edges_LB["from_node_type"] = "LevelBoundary" - edges_LB["to_node_id"] = nodes_LevelBoundary["node_id_old"] - edges_LB["to_node_type"] = "TabulatedRatingCurve" + edges_LB["from_node_id"] = ( + nodes_LevelBoundary.index.copy() + ) # nodes_LevelBoundary["meta_node_id"].copy() # as these nodes were initially FlowBoundaries, they always flow into the model, not out. 
Thus, is always the starting point (=from_node_id) + edges_LB["meta_from_node_type"] = "LevelBoundary" + edges_LB["to_node_id"] = nodes_LevelBoundary["meta_old_node_id"].values.to_numpy() + edges_LB["meta_to_node_type"] = "TabulatedRatingCurve" edges_LB["meta_categorie"] = "doorgaand" # find the geometries, based on the from and to points @@ -331,16 +352,45 @@ def create_linestring(row): return LineString([row["from_point"], row["to_point"]]) # create the linestrings, and plug them into the df of edges_LB - if len(nodes_FlowBoundary) > 0: - lines_LB["line"] = lines_LB.apply(create_linestring, axis=1) - edges_LB["geometry"] = lines_LB["line"] + if len(lines_LB) > 0: + lines_LB["geometry"] = lines_LB.apply(create_linestring, axis=1) + # edges_LB["geometry"] = lines_LB["line"] + + # merge the geometries to the newtemp + edges_LB = edges_LB.merge(right=lines_LB[["geometry"]], left_on="from_node_id", right_index=True, how="left") # concat the original edges with the newly created edges of the LevelBoundaries - ribasim_model.edge.df = pd.concat([ribasim_model.edge.df, edges_LB]).reset_index(drop=True) + new_edges = pd.concat([ribasim_model.edge.df, edges_LB]).reset_index(drop=True) + new_edges["edge_id"] = new_edges.index.copy() + 1 + new_edges = new_edges[ + [ + "edge_id", + "from_node_id", + "to_node_id", + "edge_type", + "name", + "subnetwork_id", + "geometry", + "meta_from_node_type", + "meta_to_node_type", + "meta_categorie", + ] + ] + new_edges = new_edges.set_index("edge_id") + ribasim_model.edge.df = new_edges # replace all 'FlowBoundaries' with 'LevelBoundaries' in the edge table # ribasim_model.edge.df.replace(to_replace='FlowBoundary', value='LevelBoundary', inplace=True) + # create the static table for the + static_LevelBoundary = nodes_LevelBoundary.reset_index().copy()[["node_id"]] + static_LevelBoundary = static_LevelBoundary.rename(columns={"meta_node_id": "node_id"}) + static_LevelBoundary["level"] = default_level + new_static_LevelBoundary = 
pd.concat([ribasim_model.level_boundary.static.df, static_LevelBoundary]).reset_index( + drop=True + ) + ribasim_model.level_boundary.static = new_static_LevelBoundary + return @@ -352,7 +402,9 @@ def add_outlets(ribasim_model, delta_crest_level=0.10): # ] # update: change all TRC's to Outlets - TRC_naar_OL = ribasim_model.tabulated_rating_curve.static.df.copy() + # TRC_naar_OL = ribasim_model.tabulated_rating_curve.static.df.copy() #aanpassing RB 11 oktober + TRC_naar_OL = ribasim_model.tabulated_rating_curve.node.df.copy() + TRC_naar_OL = TRC_naar_OL.reset_index() # convert the node_id index to a regular column TRC_naar_OL = TRC_naar_OL.drop_duplicates(subset="node_id", keep="first") TRC_naar_OL = TRC_naar_OL[["node_id"]] @@ -372,35 +424,45 @@ def add_outlets(ribasim_model, delta_crest_level=0.10): # clean the df for clarity. Next, add the levels to the outlet df target_level = target_level[["node_id_x", "level"]] - target_level.rename(columns={"level": "min_crest_level", "node_id_x": "node_id"}, inplace=True) + target_level.rename(columns={"level": "meta_min_crest_level", "node_id_x": "node_id"}, inplace=True) outlet = target_level.copy(deep=True) - outlet["min_crest_level"] -= ( + outlet["meta_min_crest_level"] -= ( delta_crest_level # the peil of the boezem is allowed to lower with this much before no water will flow through the outlet, to prevent ) get_outlet_geometries = ribasim_model.tabulated_rating_curve.node.df.loc[ - ribasim_model.tabulated_rating_curve.node.df.node_id.isin(outlet.node_id.to_numpy()) + ribasim_model.tabulated_rating_curve.node.df.meta_node_id.isin(outlet.node_id.to_numpy()) ] - outlet = outlet.merge(get_outlet_geometries[["node_id", "geometry"]], on="node_id") + outlet = outlet.merge( + get_outlet_geometries[["meta_node_id", "geometry"]], left_on="node_id", right_on="meta_node_id" + ) outlet["node_type"] = "Outlet" outlet["flow_rate"] = 0 # default setting outlet["meta_categorie"] = "Inlaat" + outlet_node = outlet[["node_id", 
"meta_node_id", "node_type", "geometry"]] + outlet_node = outlet_node.set_index("node_id") + outlet_static = outlet[["node_id", "flow_rate", "meta_min_crest_level", "meta_categorie"]] + # add the outlets to the model - ribasim_model.outlet.node.df = outlet[["node_id", "node_type", "geometry"]] - ribasim_model.outlet.static.df = outlet[["node_id", "flow_rate", "min_crest_level", "meta_categorie"]] + ribasim_model.outlet.node.df = outlet_node + ribasim_model.outlet.static = outlet_static # remove the TRC's nodes - ribasim_model.tabulated_rating_curve.node = ribasim_model.tabulated_rating_curve.node.df.loc[ - ~ribasim_model.tabulated_rating_curve.node.df.node_id.isin(outlet.node_id) - ].reset_index(drop=True) + ribasim_model.tabulated_rating_curve.node.df = ribasim_model.tabulated_rating_curve.node.df.loc[ + ~ribasim_model.tabulated_rating_curve.node.df.meta_node_id.isin(outlet.meta_node_id) + ] # .reset_index(drop=True) ribasim_model.tabulated_rating_curve.static = ribasim_model.tabulated_rating_curve.static.df.loc[ - ~ribasim_model.tabulated_rating_curve.static.df.node_id.isin(outlet.node_id) + ribasim_model.tabulated_rating_curve.static.df.node_id.isin( + ribasim_model.tabulated_rating_curve.node.df.index.to_numpy() + ) ].reset_index(drop=True) # replace the from_node_type and the to_node_type in the edge table - ribasim_model.edge.df.loc[ribasim_model.edge.df.from_node_id.isin(outlet.node_id), "from_node_type"] = "Outlet" - ribasim_model.edge.df.loc[ribasim_model.edge.df.to_node_id.isin(outlet.node_id), "to_node_type"] = "Outlet" + ribasim_model.edge.df = ribasim_model.edge.df.replace(to_replace="TabulatedRatingCurve", value="Outlet") + + # ribasim_model.edge.df.loc[ribasim_model.edge.df.from_node_id.isin(outlet.node_id), "from_node_type"] = "Outlet" + # ribasim_model.edge.df.loc[ribasim_model.edge.df.to_node_id.isin(outlet.node_id), "to_node_type"] = "Outlet" return @@ -820,8 +882,8 @@ def validate_basin_area(model): too_small_basins = [] error = False for index, 
row in model.basin.node.df.iterrows(): - basin_id = int(row["node_id"]) - basin_geometry = model.basin.area.df.loc[model.basin.area.df["node_id"] == basin_id, "geometry"] + basin_id = int(row["meta_node_id"]) + basin_geometry = model.basin.area.df.loc[model.basin.area.df["meta_node_id"] == basin_id, "geometry"] if not basin_geometry.empty: basin_area = basin_geometry.iloc[0].area if basin_area < 100: @@ -856,25 +918,41 @@ def identify_node_meta_categorie(ribasim_model): # select the nodes which originate from, and go to a boundary nodes_from_boundary = ribasim_model.edge.df.loc[ - ribasim_model.edge.df.from_node_type == "LevelBoundary", "to_node_id" + ribasim_model.edge.df.meta_from_node_type == "LevelBoundary", "to_node_id" + ] + nodes_to_boundary = ribasim_model.edge.df.loc[ + ribasim_model.edge.df.meta_to_node_type == "LevelBoundary", "from_node_id" ] - nodes_to_boundary = ribasim_model.edge.df.loc[ribasim_model.edge.df.to_node_type == "LevelBoundary", "from_node_id"] # identify the INlaten from the boezem, both stuwen (outlets) and gemalen (pumps) ribasim_model.outlet.static.df.loc[ ribasim_model.outlet.static.df.node_id.isin(nodes_from_boezem), "meta_categorie" ] = "Inlaat boezem, stuw" - ribasim_model.pump.static.df.loc[ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem), "meta_categorie"] = ( - "Inlaat boezem, gemaal" - ) + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), + "meta_categorie", + ] = "Inlaat boezem, afvoer gemaal" + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), + "meta_categorie", + ] = "Inlaat boezem, aanvoer gemaal" # identify the UITlaten from the boezem, both stuwen (outlets) and gemalen (pumps) ribasim_model.outlet.static.df.loc[ ribasim_model.outlet.static.df.node_id.isin(nodes_to_boezem), "meta_categorie" ] = 
"Uitlaat boezem, stuw" - ribasim_model.pump.static.df.loc[ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem), "meta_categorie"] = ( - "Uitlaat boezem, gemaal" - ) + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), + "meta_categorie", + ] = "Uitlaat boezem, afvoer gemaal" + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), + "meta_categorie", + ] = "Uitlaat boezem, aanvoer gemaal" # identify the outlets and pumps at the regular peilgebieden ribasim_model.outlet.static.df.loc[ @@ -884,29 +962,58 @@ def identify_node_meta_categorie(ribasim_model): ), "meta_categorie", ] = "Reguliere stuw" + ribasim_model.pump.static.df.loc[ ~( (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)) | (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)) ), "meta_categorie", - ] = "Regulier gemaal" + ] = "Regulier gemaal" # differentiate between afvoer and aanvoer below + + # differentiate between reguliere afvoer and regulieren aanvoer gemalen + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.meta_categorie == "Regulier gemaal") + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), + "meta_categorie", + ] = "Regulier afvoer gemaal" + + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.meta_categorie == "Regulier gemaal") + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), + "meta_categorie", + ] = "Regulier aanvoer gemaal" # repeat for the boundary nodes # identify the buitenwater uitlaten and inlaten. A part will be overwritten later, if its a boundary & boezem. 
ribasim_model.outlet.static.df.loc[ ribasim_model.outlet.static.df.node_id.isin(nodes_to_boundary), "meta_categorie" ] = "Uitlaat buitenwater peilgebied, stuw" - ribasim_model.pump.static.df.loc[ribasim_model.pump.static.df.node_id.isin(nodes_to_boundary), "meta_categorie"] = ( - "Uitlaat buitenwater peilgebied, gemaal" - ) + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_to_boundary)) + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), + "meta_categorie", + ] = "Uitlaat buitenwater peilgebied, afvoer gemaal" + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_to_boundary)) + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), + "meta_categorie", + ] = "Uitlaat buitenwater peilgebied, aanvoer gemaal" ribasim_model.outlet.static.df.loc[ ribasim_model.outlet.static.df.node_id.isin(nodes_from_boundary), "meta_categorie" ] = "Inlaat buitenwater peilgebied, stuw" ribasim_model.pump.static.df.loc[ - ribasim_model.pump.static.df.node_id.isin(nodes_from_boundary), "meta_categorie" - ] = "Inlaat buitenwater peilgebied, gemaal" + (ribasim_model.pump.static.df.node_id.isin(nodes_from_boundary)) + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), + "meta_categorie", + ] = "Inlaat buitenwater peilgebied, afvoer gemaal" + + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_from_boundary)) + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), + "meta_categorie", + ] = "Inlaat buitenwater peilgebied, aanvoer gemaal" # boundary & boezem. This is the part where a portion of the already defined meta_categorie will be overwritten by the code above. 
ribasim_model.outlet.static.df.loc[ @@ -914,11 +1021,20 @@ def identify_node_meta_categorie(ribasim_model): & (ribasim_model.outlet.static.df.node_id.isin(nodes_from_boezem)), # to "meta_categorie", ] = "Uitlaat buitenwater boezem, stuw" + ribasim_model.pump.static.df.loc[ (ribasim_model.pump.static.df.node_id.isin(nodes_to_boundary)) - & (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)), # to + & (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), # to "meta_categorie", - ] = "Uitlaat buitenwater boezem, gemaal" + ] = "Uitlaat buitenwater boezem, afvoer gemaal" + + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_to_boundary)) + & (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), # to + "meta_categorie", + ] = "Uitlaat buitenwater boezem, aanvoer gemaal" ribasim_model.outlet.static.df.loc[ (ribasim_model.outlet.static.df.node_id.isin(nodes_from_boundary)) @@ -927,9 +1043,17 @@ def identify_node_meta_categorie(ribasim_model): ] = "Inlaat buitenwater boezem, stuw" ribasim_model.pump.static.df.loc[ (ribasim_model.pump.static.df.node_id.isin(nodes_from_boundary)) - & (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)), # from + & (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), # from + "meta_categorie", + ] = "Inlaat buitenwater boezem, afvoer gemaal" + + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_from_boundary)) + & (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), # from "meta_categorie", - ] = "Inlaat buitenwater boezem, gemaal" + ] = "Inlaat buitenwater boezem, aanvoer gemaal" # boezem & boezem. 
ribasim_model.outlet.static.df.loc[ @@ -940,9 +1064,17 @@ def identify_node_meta_categorie(ribasim_model): ribasim_model.pump.static.df.loc[ (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)) - & (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)), + & (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer == 0), + "meta_categorie", + ] = "Boezem boezem, afvoer gemaal" + + ribasim_model.pump.static.df.loc[ + (ribasim_model.pump.static.df.node_id.isin(nodes_from_boezem)) + & (ribasim_model.pump.static.df.node_id.isin(nodes_to_boezem)) + & (ribasim_model.pump.static.df.meta_func_aanvoer != 0), "meta_categorie", - ] = "Boezem boezem, gemaal" + ] = "Boezem boezem, aanvoer gemaal" # some pumps have been added due to the feedback form. Assume all these nodes are afvoer gemalen ribasim_model.pump.static.df.meta_func_afvoer.fillna(value=1.0, inplace=True) @@ -958,6 +1090,68 @@ def load_model_settings(file_path): return settings +def determine_min_upstream_max_downstream_levels(ribasim_model, waterschap): + sturing = load_model_settings(f"sturing_{waterschap}.json") # load the waterschap specific sturing + + # create empty columns for the sturing + ribasim_model.outlet.static.df["min_upstream_level"] = np.nan + ribasim_model.outlet.static.df["max_downstream_level"] = np.nan + ribasim_model.outlet.static.df["max_flow_rate"] = np.nan + ribasim_model.outlet.static.df["flow_rate"] = np.nan + + ribasim_model.pump.static.df["min_upstream_level"] = np.nan + ribasim_model.pump.static.df["max_downstream_level"] = np.nan + ribasim_model.pump.static.df["max_flow_rate"] = np.nan + ribasim_model.pump.static.df["flow_rate"] = np.nan + + # make a temp copy to reduce line length, place it later again in the model + outlet = ribasim_model.outlet.static.df.copy() + pump = ribasim_model.pump.static.df.copy() + + # for each different outlet and pump type, determine the min an max upstream and downstream level + for 
types, settings in sturing.items(): + # Extract values for each setting + upstream_level_offset = settings["upstream_level_offset"] + downstream_level_offset = settings["downstream_level_offset"] + max_flow_rate = settings["max_flow_rate"] + + # Update the min_upstream_level and max_downstream_level in the OUTLET dataframe + outlet.loc[outlet.meta_categorie == types, "min_upstream_level"] = ( + outlet.meta_from_level - upstream_level_offset + ) + outlet.loc[outlet.meta_categorie == types, "max_downstream_level"] = ( + outlet.meta_to_level + downstream_level_offset + ) + outlet.loc[outlet.meta_categorie == types, "flow_rate"] = max_flow_rate + + # Update the min_upstream_level and max_downstream_level in the PUMP dataframe. can be done within the same loop, as the meta_categorie is different for the outlet and pump + pump.loc[pump.meta_categorie == types, "min_upstream_level"] = pump.meta_from_level - upstream_level_offset + pump.loc[pump.meta_categorie == types, "max_downstream_level"] = pump.meta_to_level + downstream_level_offset + pump.loc[pump.meta_categorie == types, "flow_rate"] = max_flow_rate + + # outlet['flow_rate'] = outlet['max_flow_rate'] + # pump['flow_rate'] = pump['max_flow_rate'] + + # raise warning if there are np.nan in the columns + def check_for_nans_in_columns( + df, outlet_or_pump, columns_to_check=["min_upstream_level", "max_downstream_level", "flow_rate", "flow_rate"] + ): + if df[columns_to_check].isnull().values.any(): + warnings.warn( + f"Warning: NaN values found in the following columns of the {outlet_or_pump} dataframe: " + f"{', '.join([col for col in columns_to_check if df[col].isnull().any()])}" + ) + + check_for_nans_in_columns(outlet, "outlet") + check_for_nans_in_columns(pump, "pump") + + # place the df's back in the ribasim_model + ribasim_model.outlet.static.df = outlet + ribasim_model.pump.static.df = pump + + return + + def add_discrete_control(ribasim_model, waterschap, default_level): """Add discrete control nodes to the 
network. The rules are based on the meta_categorie of each node.""" # load in the sturing which is defined in the json files @@ -1079,8 +1273,6 @@ def add_discrete_control(ribasim_model, waterschap, default_level): "node_id", ] - # display(inlaat_buitenwater_peilgebied_gemaal_afvoer) - # display(inlaat_buitenwater_peilgebied_gemaal_aanvoer) boezem_boezem_gemaal_afvoer = ribasim_model.outlet.static.df.loc[ ( (ribasim_model.outlet.static.df.meta_categorie == "Boezem boezem, gemaal") @@ -1245,7 +1437,7 @@ def add_discrete_control_partswise(ribasim_model, nodes_to_control, category, st outlet_static_block = ribasim_model.outlet.static.df.copy() outlet_static_block["control_state"] = "block" outlet_static_block["flow_rate"] = flow_rate_block - outlet_static_block["min_crest_level"] = ( + outlet_static_block["meta_min_crest_level"] = ( np.nan ) # min crest level is redundant, as control is defined for both upstream as well as downstream levels @@ -1253,7 +1445,7 @@ def add_discrete_control_partswise(ribasim_model, nodes_to_control, category, st outlet_static_pass = ribasim_model.outlet.static.df.copy() outlet_static_pass["control_state"] = "pass" outlet_static_pass["flow_rate"] = flow_rate_pass - outlet_static_pass["min_crest_level"] = ( + outlet_static_pass["meta_min_crest_level"] = ( np.nan ) # min crest level is redundant, as control is defined for both upstream as well as downstream levels @@ -1282,7 +1474,6 @@ def add_discrete_control_partswise(ribasim_model, nodes_to_control, category, st .reset_index(drop=True) ) ribasim_model.pump.static.df = pump_static - # display(pump_static) ### condition #################################################### # create the DiscreteControl condition table @@ -1442,6 +1633,294 @@ def create_linestring(row): return +def clean_tables(ribasim_model): + """Only retain node_id's which are present in the .node table.""" + # Basin + basin_ids = ribasim_model.basin.node.df.loc[ + ribasim_model.basin.node.df.node_type == "Basin", 
"meta_node_id" + ].to_numpy() + ribasim_model.basin.area = ribasim_model.basin.area.df.loc[ + ribasim_model.basin.area.df.node_id.isin(basin_ids) + ].reset_index(drop=True) + ribasim_model.basin.profile = ribasim_model.basin.profile.df.loc[ + ribasim_model.basin.profile.df.node_id.isin(basin_ids) + ].reset_index(drop=True) + ribasim_model.basin.state = ribasim_model.basin.state.df.loc[ + ribasim_model.basin.state.df.node_id.isin(basin_ids) + ].reset_index(drop=True) + ribasim_model.basin.static = ribasim_model.basin.static.df.loc[ + ribasim_model.basin.static.df.node_id.isin(basin_ids) + ].reset_index(drop=True) + + # Outlet + outlet_ids = ribasim_model.outlet.node.df.loc[ + ribasim_model.outlet.node.df.node_type == "Outlet", "meta_node_id" + ].to_numpy() + ribasim_model.outlet.static = ribasim_model.outlet.static.df.loc[ + ribasim_model.outlet.static.df.node_id.isin(outlet_ids) + ].reset_index(drop=True) + + # Pump + pump_ids = ribasim_model.pump.node.df.loc[ribasim_model.pump.node.df.node_type == "Pump", "meta_node_id"].to_numpy() + ribasim_model.pump.static = ribasim_model.pump.static.df.loc[ + ribasim_model.pump.static.df.node_id.isin(pump_ids) + ].reset_index(drop=True) + + # ManningResistance + manningresistance_ids = ribasim_model.manning_resistance.node.df.loc[ + ribasim_model.manning_resistance.node.df.node_type == "ManningResistance", "meta_node_id" + ].to_numpy() + ribasim_model.manning_resistance.static = ribasim_model.manning_resistance.static.df.loc[ + ribasim_model.manning_resistance.static.df.node_id.isin(manningresistance_ids) + ].reset_index(drop=True) + + # LevelBoundary + levelboundary_ids = ribasim_model.level_boundary.node.df.loc[ + ribasim_model.level_boundary.node.df.node_type == "LevelBoundary", "meta_node_id" + ].to_numpy() + ribasim_model.level_boundary.static = ribasim_model.level_boundary.static.df.loc[ + ribasim_model.level_boundary.static.df.node_id.isin(levelboundary_ids) + ].reset_index(drop=True) + + # identify empty static tables 
+ # Basin + basin_static_missing = ribasim_model.basin.node.df.loc[ + ~ribasim_model.basin.node.df.index.isin(ribasim_model.basin.static.df.node_id) + ] # .index.to_numpy() + if len(basin_static_missing) > 0: + print("\nFollowing node_id's in the Basin.static table are missing:\n", basin_static_missing.index.to_numpy()) + + basin_state_missing = ribasim_model.basin.node.df.loc[ + ~ribasim_model.basin.node.df.index.isin(ribasim_model.basin.state.df.node_id) + ] # .index.to_numpy() + if len(basin_state_missing) > 0: + print("\nFollowing node_id's in the Basin.state table are missing:\n", basin_state_missing.index.to_numpy()) + + basin_profile_missing = ribasim_model.basin.node.df.loc[ + ~ribasim_model.basin.node.df.index.isin(ribasim_model.basin.profile.df.node_id) + ] # .index.to_numpy() + if len(basin_profile_missing) > 0: + print("\nFollowing node_id's in the Basin.profile table are missing:\n", basin_profile_missing.index.to_numpy()) + + basin_area_missing = ribasim_model.basin.node.df.loc[ + ~ribasim_model.basin.node.df.index.isin(ribasim_model.basin.area.df.node_id) + ] # .index.to_numpy() + if len(basin_area_missing) > 0: + print("\nFollowing node_id's in the Basin.area table are missing:\n", basin_area_missing.index.to_numpy()) + + # Outlet + outlet_missing = ribasim_model.outlet.node.df.loc[ + ~ribasim_model.outlet.node.df.index.isin(ribasim_model.outlet.static.df.node_id) + ] # .index.to_numpy() + if len(outlet_missing) > 0: + print("\nFollowing node_id's in the Outlet.static table are missing:\n", outlet_missing.index.to_numpy()) + + # Pump + pump_missing = ribasim_model.pump.node.df.loc[ + ~ribasim_model.pump.node.df.index.isin(ribasim_model.pump.static.df.node_id) + ] # .index.to_numpy() + if len(pump_missing) > 0: + print("\nFollowing node_id's in the pump.static table are missing:\n", pump_missing.index.to_numpy()) + + # Manning resistance + manning_resistance_missing = ribasim_model.manning_resistance.node.df.loc[ + 
~ribasim_model.manning_resistance.node.df.index.isin(ribasim_model.manning_resistance.static.df.node_id) + ] # .index.to_numpy() + if len(manning_resistance_missing) > 0: + print( + "\nFollowing node_id's in the manning_resistance.static table are missing\n:", + manning_resistance_missing.index.to_numpy(), + ) + + level_boundary_missing = ribasim_model.level_boundary.node.df.loc[ + ~ribasim_model.level_boundary.node.df.index.isin(ribasim_model.level_boundary.static.df.node_id) + ] # .index.to_numpy() + if len(level_boundary_missing) > 0: + print( + "\nFollowing node_id's in the level_boundary.static table are missing:\n", + level_boundary_missing.index.to_numpy(), + ) + + # check for duplicated indexes in all the node tables + # Creating individual DataFrames for each node type + basin_ids_df = pd.DataFrame({"Type": "Basin", "node_id": basin_ids}) + outlet_ids_df = pd.DataFrame({"Type": "Outlet", "node_id": outlet_ids}) + pump_ids_df = pd.DataFrame({"Type": "Pump", "node_id": pump_ids}) + manningresistance_ids_df = pd.DataFrame({"Type": "ManningResistance", "node_id": manningresistance_ids}) + levelboundary_ids_df = pd.DataFrame({"Type": "LevelBoundary", "node_id": levelboundary_ids}) + + # Concatenating all DataFrames into one + combined_df = pd.concat( + [basin_ids_df, outlet_ids_df, pump_ids_df, manningresistance_ids_df, levelboundary_ids_df], ignore_index=True + ) + duplicated_ids = combined_df[combined_df.duplicated(subset="node_id", keep=False)] + + if len(duplicated_ids) > 0: + print("\nThe following node_ids are duplicates: \n", duplicated_ids) + + # check for duplicated indexes in the basin static tables + duplicated_static_basin = ribasim_model.basin.static.df.loc[ + ribasim_model.basin.static.df.duplicated(subset="node_id") + ] + if len(duplicated_static_basin) > 0: + print("\nFollowing indexes are duplicated in the basin.static table:", duplicated_static_basin) + + # check for duplicated indexes in the outlet static tables + duplicated_static_outlet = 
ribasim_model.outlet.static.df.loc[ + ribasim_model.outlet.static.df.duplicated(subset="node_id") + ] + if len(duplicated_static_outlet) > 0: + print("\nFollowing indexes are duplicated in the outlet.static table:", duplicated_static_outlet) + + # check for duplicated indexes in the pump static tables + duplicated_static_pump = ribasim_model.pump.static.df.loc[ribasim_model.pump.static.df.duplicated(subset="node_id")] + if len(duplicated_static_pump) > 0: + print("\nFollowing indexes are duplicated in the pump.static table:", duplicated_static_pump) + + # check for duplicated indexes in the manning_resistance static tables + duplicated_static_manning_resistance = ribasim_model.manning_resistance.static.df.loc[ + ribasim_model.manning_resistance.static.df.duplicated(subset="node_id") + ] + if len(duplicated_static_manning_resistance) > 0: + print( + "\nFollowing indexes are duplicated in the manning_resistance.static table:", + duplicated_static_manning_resistance, + ) + + # check for duplicated indexes in the level_boundary static tables + duplicated_static_level_boundary = ribasim_model.level_boundary.static.df.loc[ + ribasim_model.level_boundary.static.df.duplicated(subset="node_id") + ] + if len(duplicated_static_level_boundary) > 0: + print( + "\nFollowing indexes are duplicated in the level_boundary.static table:", duplicated_static_level_boundary + ) + + return + + +def find_upstream_downstream_target_levels(ribasim_model, node): + """Find the target levels upstream and downstream from each outlet, and add them as metadata to the outlet.static table.""" + if "utlet" in node: + structure_static = ribasim_model.outlet.static.df.copy(deep=True) + structure_static = structure_static[ + [ + "node_id", + "active", + "flow_rate", + "min_flow_rate", + "max_flow_rate", + "meta_min_crest_level", + "control_state", + "meta_categorie", + ] + ] # prevent errors if the function is run before + elif "ump" in node: + structure_static = 
ribasim_model.pump.static.df.copy(deep=True) + structure_static = structure_static[ + [ + "node_id", + "active", + "flow_rate", + "min_flow_rate", + "max_flow_rate", + "control_state", + "meta_func_afvoer", + "meta_func_aanvoer", + "meta_func_circulatie", + "meta_type_verbinding", + "meta_categorie", + ] + ] # prevent errors if the function is run before + # find upstream basin node_id + structure_static = structure_static.merge( + right=ribasim_model.edge.df[["from_node_id", "to_node_id"]], + left_on="node_id", + right_on="to_node_id", + how="left", + ) + structure_static = structure_static.drop(columns="to_node_id") # remove redundant column + + # find downstream basin node_id + structure_static = structure_static.merge( + right=ribasim_model.edge.df[["to_node_id", "from_node_id"]], + left_on="node_id", + right_on="from_node_id", + how="left", + suffixes=("", "_remove"), + ) + structure_static = structure_static.drop(columns="from_node_id_remove") # remove redundant column + + # merge upstream target level to the outlet static table by using the Basins + structure_static = structure_static.merge( + right=ribasim_model.basin.state.df[["node_id", "level"]], + left_on="to_node_id", + right_on="node_id", + how="left", + suffixes=("", "_remove"), + ) + structure_static = structure_static.rename(columns={"level": "to_basin_level"}) + structure_static = structure_static.drop(columns="node_id_remove") # remove redundant column + + structure_static = structure_static.merge( + right=ribasim_model.basin.state.df[["node_id", "level"]], + left_on="from_node_id", + right_on="node_id", + how="left", + suffixes=("", "_remove"), + ) + structure_static = structure_static.rename(columns={"level": "from_basin_level"}) + structure_static = structure_static.drop(columns="node_id_remove") # remove redundant column + + # merge upstream target level to the outlet static table by using the LevelBoundaries + structure_static = structure_static.merge( + 
right=ribasim_model.level_boundary.static.df[["node_id", "level"]], + left_on="to_node_id", + right_on="node_id", + how="left", + suffixes=("", "_remove"), + ) + structure_static = structure_static.rename(columns={"level": "to_LevelBoundary_level"}) + structure_static = structure_static.drop(columns="node_id_remove") # remove redundant column + + structure_static = structure_static.merge( + right=ribasim_model.level_boundary.static.df[["node_id", "level"]], + left_on="from_node_id", + right_on="node_id", + how="left", + suffixes=("", "_remove"), + ) + structure_static = structure_static.rename(columns={"level": "from_LevelBoundary_level"}) + structure_static = structure_static.drop(columns="node_id_remove") # remove redundant column + + # fill new columns with the upstream target levels of both Basins as well as LevelBoundaries + structure_static["from_level"] = structure_static["from_basin_level"].fillna( + structure_static["from_LevelBoundary_level"] + ) + structure_static["to_level"] = structure_static["to_basin_level"].fillna(structure_static["to_LevelBoundary_level"]) + + # drop the redundant columns, and prepare column names for Ribasim + structure_static = structure_static.drop( + columns=["to_basin_level", "from_basin_level", "to_LevelBoundary_level", "from_LevelBoundary_level"] + ) + structure_static = structure_static.rename( + columns={ + "from_node_id": "meta_from_node_id", + "to_node_id": "meta_to_node_id", + "from_level": "meta_from_level", + "to_level": "meta_to_level", + } + ) + + # replace the old ribasim_model.____.static.df with the updated structure_static df + if "utlet" in node: + ribasim_model.outlet.static = structure_static + elif "ump" in node: + ribasim_model.pump.static = structure_static + + return + + ##################### Recycle bin ########################## # def calculate_update_basin_area(ribasim_model, percentage): # """ From 7a922fb9d72b3829109142119af85c5fc53dd6b1 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Mon, 21 
Oct 2024 13:31:47 +0200 Subject: [PATCH 14/23] Debuggen Aa en Maas (#167) @visr, let op, deze moet voor fix_wrij, omdat het fix-script van wrij hier niet in hoort te zitten --------- Co-authored-by: Martijn Visser --- notebooks/aa_en_maas/01_fix_model_network.py | 280 +++++++++++++++++++ 1 file changed, 280 insertions(+) create mode 100644 notebooks/aa_en_maas/01_fix_model_network.py diff --git a/notebooks/aa_en_maas/01_fix_model_network.py b/notebooks/aa_en_maas/01_fix_model_network.py new file mode 100644 index 0000000..f5b9270 --- /dev/null +++ b/notebooks/aa_en_maas/01_fix_model_network.py @@ -0,0 +1,280 @@ +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin, level_boundary, manning_resistance, outlet +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "AaenMaas" +short_name = "aam" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + + +# %% read model +model = Model.read(ribasim_toml) +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_fix_model_network", f"{short_name}.toml") +network_validator = NetworkValidator(model) + + +hydroobject_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject", fid_as_index=True +) + + +# %% some stuff we'll need again +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +level_data = level_boundary.Static(level=[0]) + +basin_data = [ + basin.Profile(level=[0.0, 1.0], area=[0.01, 1000.0]), + basin.Static( + drainage=[0.0], + potential_evaporation=[0.001 / 86400], + infiltration=[0.0], + precipitation=[0.005 / 86400], + ), + basin.State(level=[0]), +] +outlet_data = outlet.Static(flow_rate=[100]) + + +# HIER KOMEN ISSUES + +# %% 
https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2421617819 + +# Verwijderen duplicate edges +model.edge.df.drop_duplicates(inplace=True) + +# %% https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2421959240 + +# Verwijderen edge met 0m lengte +model.remove_node(34, remove_edges=True) +model.update_node(1568, "LevelBoundary", data=[level_data], node_properties={"name": ""}) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2421946693 +# toevoegen ontbrekende basins + +basin_edges_df = network_validator.edge_incorrect_connectivity() +basin_nodes_df = network_validator.node_invalid_connectivity() + +for row in basin_nodes_df.itertuples(): + # maak basin-node + basin_node = model.basin.add(Node(geometry=row.geometry), tables=basin_data) + + # update edge_table + model.edge.df.loc[basin_edges_df[basin_edges_df.from_node_id == row.node_id].index, ["from_node_id"]] = ( + basin_node.node_id + ) + model.edge.df.loc[basin_edges_df[basin_edges_df.to_node_id == row.node_id].index, ["to_node_id"]] = ( + basin_node.node_id + ) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2421991252 + +# Corrigeren netwerk Den Bosch + +# Binnenstad +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[2788, "geometry"].boundary.geoms[0]), tables=basin_data) + +model.reverse_edge(edge_id=2077) +model.redirect_edge(edge_id=2077, to_node_id=basin_node.node_id) +model.redirect_edge(edge_id=2078, from_node_id=basin_node.node_id) +model.redirect_edge(edge_id=2079, from_node_id=basin_node.node_id) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[4999, "geometry"].interpolate(0.5, normalized=True)), tables=[outlet_data] +) +model.edge.add(model.level_boundary[46], outlet_node) +model.edge.add(outlet_node, basin_node) + +# Dommel +basin_node = model.basin.add( + Node(geometry=hydroobject_gdf.at[9055, "geometry"].interpolate(0.5, normalized=True)), tables=basin_data +) 
+model.redirect_edge(edge_id=2082, from_node_id=basin_node.node_id) + +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[9055, "geometry"].interpolate(0.1, normalized=True)), tables=[outlet_data] +) + +model.edge.add(model.level_boundary[49], outlet_node) +model.edge.add(outlet_node, basin_node) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2422078500 + +# Corrigeren netwerk bij sluis Empel +for node_id in [729, 730, 1990, 1962]: + model.remove_node(node_id, remove_edges=True) + + +# Omkeren edgerichting +for edge_id in [131, 398, 406, 495, 513, 515, 894]: + model.reverse_edge(edge_id=edge_id) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2422164355 + +# Corrigeren netwerk bij Spuisluis Crèvecoeur +model.remove_node(411, remove_edges=True) +model.remove_node(4, remove_edges=True) +model.redirect_edge(edge_id=2018, to_node_id=1950) + +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[4825, "geometry"].boundary.geoms[0], name="Spuisluis Crèvecoeur"), + tables=[outlet_data], +) + +model.edge.add(outlet_node, model.level_boundary[5]) +model.edge.add(model.basin[1627], outlet_node) + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2422373708 + +# Corrigeren Afleidingskanaal bij Holthees + +# nabij grens +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[8868, "geometry"].boundary.geoms[0]), tables=basin_data) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[4680, "geometry"].interpolate(0.1, normalized=True)), tables=[outlet_data] +) + +model.redirect_edge(edge_id=2091, from_node_id=56, to_node_id=outlet_node.node_id) +model.redirect_edge(edge_id=2092, from_node_id=outlet_node.node_id, to_node_id=basin_node.node_id) +model.redirect_edge(edge_id=2093, from_node_id=653, to_node_id=basin_node.node_id) +model.redirect_edge(edge_id=2094, from_node_id=basin_node.node_id, to_node_id=82) + +# nabij afleidingskanaal +basin_node 
= model.basin.add(Node(geometry=hydroobject_gdf.at[7466, "geometry"].boundary.geoms[0]), tables=basin_data) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[8456, "geometry"].interpolate(0.1, normalized=True)), tables=[outlet_data] +) +model.redirect_edge(edge_id=2089, to_node_id=outlet_node.node_id) +model.redirect_edge(edge_id=2088, from_node_id=outlet_node.node_id, to_node_id=basin_node.node_id) +model.edge.add(model.tabulated_rating_curve[82], basin_node) +model.edge.add(basin_node, model.tabulated_rating_curve[853]) + +# nabij Maas +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[2385, "geometry"].boundary.geoms[0]), tables=basin_data) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[2539, "geometry"].boundary.geoms[0]), tables=[outlet_data] +) + +model.redirect_edge(edge_id=2054, to_node_id=basin_node.node_id) +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[26]) + + +# %% see: https://github.com/Deltares/Ribasim-NL/issues/149#issuecomment-2422452167 + +# Toevoegen basin bij Oude Zuid-Willemsvaart +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[3174, "geometry"].boundary.geoms[0], name="Sluis 9"), tables=[outlet_data] +) +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[6499, "geometry"].boundary.geoms[0]), tables=basin_data) +model.redirect_edge(edge_id=2102, to_node_id=outlet_node.node_id) +model.edge.add(outlet_node, basin_node) +model.redirect_edge(edge_id=2106, to_node_id=2026) + +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[646, "geometry"].interpolate(0.9, normalized=True)), tables=[outlet_data] +) +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[66]) + +# EINDE ISSUES + + +# %% +# corrigeren knoop-topologie + +# ManningResistance bovenstrooms LevelBoundary naar Outlet +for row in network_validator.edge_incorrect_type_connectivity().itertuples(): + 
model.update_node(row.from_node_id, "Outlet", data=[outlet_data]) + +# Inlaten van ManningResistance naar Outlet +for row in network_validator.edge_incorrect_type_connectivity( + from_node_type="LevelBoundary", to_node_type="ManningResistance" +).itertuples(): + model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) + + +## UPDATEN STATIC TABLES + +# %% +# basin-profielen/state updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), + "level": [0.0, 1.0] * len(model.basin.node.df), + "area": [0.01, 1000.0] * len(model.basin.node.df), + } +) +df.index.name = "fid" +model.basin.profile.df = df + +df = model.basin.profile.df.groupby("node_id")[["level"]].max().reset_index() +df.index.name = "fid" +model.basin.state.df = df + +# %% +# tabulated_rating_curves updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.tabulated_rating_curve.node.df.index.to_numpy(), 2), + "level": [0.0, 5] * len(model.tabulated_rating_curve.node.df), + "flow_rate": [0, 0.1] * len(model.tabulated_rating_curve.node.df), + } +) +df.index.name = "fid" +model.tabulated_rating_curve.static.df = df + + +# %% + +# level_boundaries updaten +df = pd.DataFrame( + { + "node_id": model.level_boundary.node.df.index.to_list(), + "level": [0.0] * len(model.level_boundary.node.df), + } +) +df.index.name = "fid" +model.level_boundary.static.df = df + +# %% +# manning_resistance updaten +length = len(model.manning_resistance.node.df) +df = pd.DataFrame( + { + "node_id": model.manning_resistance.node.df.index.to_list(), + "length": [100.0] * length, + "manning_n": [100.0] * length, + "profile_width": [100.0] * length, + "profile_slope": [100.0] * length, + } +) +df.index.name = "fid" +model.manning_resistance.static.df = df + +# %% +# flow boundaries updaten +length = len(model.flow_boundary.node.df) +df = pd.DataFrame( + { + "node_id": model.flow_boundary.node.df.index.to_list(), + "flow_rate": [0.0] * length, + } +) +df.index.name = "fid" 
+model.flow_boundary.static.df = df + + +# %% write model +model.use_validation = True +model.write(ribasim_toml) + +# %% From 28748c396c8d4098d00da4803970dc971f6eb3d9 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Mon, 21 Oct 2024 13:33:18 +0200 Subject: [PATCH 15/23] Debuggen Rijn en IJssel (#168) Co-authored-by: Martijn Visser --- .../rijn_en_ijssel/01_fix_model_network.py | 143 +++++++++--------- 1 file changed, 72 insertions(+), 71 deletions(-) diff --git a/notebooks/rijn_en_ijssel/01_fix_model_network.py b/notebooks/rijn_en_ijssel/01_fix_model_network.py index 77a0ba0..7d81d75 100644 --- a/notebooks/rijn_en_ijssel/01_fix_model_network.py +++ b/notebooks/rijn_en_ijssel/01_fix_model_network.py @@ -5,17 +5,22 @@ from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet from ribasim_nl import CloudStorage, Model, NetworkValidator -from shapely.geometry import MultiPolygon cloud = CloudStorage() -authority = "ValleienVeluwe" -short_name = "venv" +authority = "RijnenIJssel" +short_name = "wrij" ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") database_gpkg = ribasim_toml.with_name("database.gpkg") -split_line_gdf = gpd.read_file( - cloud.joinpath(authority, "verwerkt", "fix_user_data.gpkg"), layer="split_basins", fid_as_index=True + + +hydroobject_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject", fid_as_index=True +) + +duiker_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="duikersifonhevel", fid_as_index=True ) # %% read model @@ -40,18 +45,16 @@ outlet_data = outlet.Static(flow_rate=[100]) -# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401873626 +# %% see: https://github.com/Deltares/Ribasim-NL/issues/151#issuecomment-2419605149 # Verwijderen duplicate edges model.edge.df.drop_duplicates(inplace=True) -# %% see: 
https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401876430 +# %% see: https://github.com/Deltares/Ribasim-NL/issues/151#issuecomment-2419620184 +# toevoegen ontbrekende basins -# Toevoegen ontbrekende basins (oplossen topologie) basin_edges_df = network_validator.edge_incorrect_connectivity() basin_nodes_df = network_validator.node_invalid_connectivity() -basin_edges_df.to_file("basin_edges.gpkg") -basin_nodes_df.to_file("basin_nodes.gpkg") for row in basin_nodes_df.itertuples(): # maak basin-node @@ -65,66 +68,73 @@ basin_node.node_id ) +# %% see: https://github.com/Deltares/Ribasim-NL/issues/151#issuecomment-2419649171 +# update edge administratie -# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2401959032 - -# Oplossen verkeerde takrichting -for edge_id in [1353, 933, 373, 401, 4, 1338]: - model.reverse_edge(edge_id=edge_id) - -# model.invalid_topology_at_node().to_file("topo_errors.gpkg") - +model.edge.df.loc[516, "from_node_id"] = 666 +model.edge.df.loc[520, "from_node_id"] = 667 +model.edge.df.loc[954, "to_node_id"] = 652 +model.edge.df.loc[1271, "to_node_id"] = 662 +model.edge.df.loc[1281, "to_node_id"] = 667 -# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402031275 +# %% see: https://github.com/Deltares/Ribasim-NL/issues/151#issuecomment-2419747636 -# Veluwemeer at Harderwijk verwijderen -for node_id in [24, 694]: - model.remove_node(node_id, remove_edges=True) +# fix edge_richting -# %% see: https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402229646 +# verplaatsen van `LevelBoundary` 47 binnen de basin, updaten naar `Basin` en reversen van `Edge` 1370 +model.move_node(47, hydroobject_gdf.at[8781, "geometry"].boundary.geoms[0]) +model.update_node(47, "Basin", data=basin_data) +model.reverse_edge(edge_id=1370) -# Veluwemeer at Elburg verwijderen -for node_id in [3, 1277]: - model.remove_node(node_id, remove_edges=True) +# omdraaien richting van `Edge` 196 +for edge_id in [196, 
188, 472, 513, 560, 391, 566]: + model.reverse_edge(edge_id=edge_id) -# %% https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402257101 +# opruimen basin Arnhem nabij Lauwersgracht +model.remove_node(514, remove_edges=True) +model.remove_node(1101, remove_edges=True) +model.remove_edges([1364, 1363]) -model.fix_unassigned_basin_area() +kdu = duiker_gdf.loc[548] +outlet_node = model.outlet.add( + Node(name=kdu.code, geometry=kdu.geometry.interpolate(0.5, normalized=True), meta_object_type="duikersifonhevel"), + tables=[outlet_data], +) +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[9528, "geometry"].boundary.geoms[0])) +model.edge.add(model.tabulated_rating_curve[265], basin_node) +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[43]) +model.edge.add(basin_node, model.pump[264]) +model.edge.add(model.pump[264], model.level_boundary[44]) + +# %% see https://github.com/Deltares/Ribasim-NL/issues/151#issuecomment-2422536079 + +# corrigeren ontbrekende outlets nabij Rijkswateren +for fid, edge_id, boundary_node_id in ((14276, 1331, 19), (14259, 1337, 25), (14683, 1339, 27), (3294, 1355, 38)): + kdu = duiker_gdf.loc[fid] + outlet_node = model.outlet.add( + Node( + name=kdu.code, geometry=kdu.geometry.interpolate(0.5, normalized=True), meta_object_type="duikersifonhevel" + ), + tables=[outlet_data], + ) + model.redirect_edge(edge_id=edge_id, to_node_id=outlet_node.node_id) + model.edge.add(outlet_node, model.level_boundary[boundary_node_id]) -# %% https://github.com/Deltares/Ribasim-NL/issues/148#issuecomment-2402281396 +# 1349 heeft geen duiker +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[10080, "geometry"].interpolate(0.5, normalized=True)), + tables=[outlet_data], +) +model.redirect_edge(edge_id=1349, to_node_id=outlet_node.node_id) +model.edge.add(outlet_node, model.level_boundary[33]) -# Verwijderen basins zonder area of toevoegen/opknippen basin /area 
-model.split_basin(line=split_line_gdf.at[1, "geometry"]) -model.split_basin(line=split_line_gdf.at[2, "geometry"]) -model.split_basin(line=split_line_gdf.at[3, "geometry"]) -model.merge_basins(basin_id=1150, to_basin_id=1101) -model.merge_basins(basin_id=1196, to_basin_id=1192) -model.merge_basins(basin_id=1202, to_basin_id=1049) -model.merge_basins(basin_id=1207, to_basin_id=837) -model.merge_basins(basin_id=1208, to_basin_id=851, are_connected=False) -model.merge_basins(basin_id=1210, to_basin_id=1090) -model.merge_basins(basin_id=1212, to_basin_id=823) -model.merge_basins(basin_id=1216, to_basin_id=751, are_connected=False) -model.merge_basins(basin_id=1217, to_basin_id=752) -model.merge_basins(basin_id=1219, to_basin_id=814) -model.merge_basins(basin_id=1220, to_basin_id=1118) -model.merge_basins(basin_id=1221, to_basin_id=1170) -model.update_node(1229, "LevelBoundary", data=[level_data]) -model.merge_basins(basin_id=1254, to_basin_id=1091, are_connected=False) -model.merge_basins(basin_id=1260, to_basin_id=1125, are_connected=False) -model.merge_basins(basin_id=1263, to_basin_id=863) -model.merge_basins(basin_id=1265, to_basin_id=974) -model.remove_node(node_id=539, remove_edges=True) -model.merge_basins(basin_id=1267, to_basin_id=1177, are_connected=False) -model.remove_node(1268, remove_edges=True) -model.remove_node(360, remove_edges=True) -model.remove_node(394, remove_edges=True) -model.merge_basins(basin_id=1269, to_basin_id=1087) -model.merge_basins(basin_id=1149, to_basin_id=1270, are_connected=False) +# %% -model.fix_unassigned_basin_area() -model.basin.area.df = model.basin.area.df[~model.basin.area.df.index.isin(model.unassigned_basin_area.index)] +network_validator.edge_incorrect_type_connectivity(from_node_type="Basin", to_node_type="LevelBoundary").to_file( + "basin_to_levelboundary.gpkg" +) # %% # corrigeren knoop-topologie @@ -139,16 +149,8 @@ ).itertuples(): model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) - -# buffer out small 
slivers -model.basin.area.df.loc[:, ["geometry"]] = ( - model.basin.area.df.buffer(0.1) - .buffer(-0.1) - .apply(lambda x: x if x.geom_type == "MultiPolygon" else MultiPolygon([x])) -) - -# basin-profielen updaten - +# %% +# basin-profielen/state updaten df = pd.DataFrame( { "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), @@ -203,9 +205,8 @@ df.index.name = "fid" model.manning_resistance.static.df = df - # %% write model -# model.use_validation = True +# model.use_validation = False model.write(ribasim_toml) # %% From bf917d7b8ad3107652cee2a9bbc2a5d779a5df29 Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Mon, 21 Oct 2024 13:43:50 +0200 Subject: [PATCH 16/23] AGV post-processing reproducability (#170) Not yet complete, I still have to use more data from the cloud. I converted waterschappen.json to a Python literal, and the notebooks I touched to Python scripts. Steps towards reproducability. --- .pre-commit-config.yaml | 4 +- docs/workflow/level-controlled.qmd | 2 +- .../aa_en_maas/00_review_model_network.py | 1 + notebooks/aa_en_maas/01_fix_model_network.py | 1 + .../klassificatie_waterlijnen.py | 1 + .../nationaal_hoofdwater.py | 3 +- .../oppervlaktewater_belgie.py | 1 + notebooks/basin_area_nodes.py | 1 + .../00_review_model_network.py | 1 + notebooks/de_dommel/01_fix_model_network.py | 3 +- notebooks/de_dommel/02_fix_edges.py | 1 + notebooks/de_dommel/03_fix_basin_area.py | 3 +- notebooks/de_dommel/04_parameterize_model.py | 3 +- notebooks/de_dommel/05_add_berging.py | 3 +- .../01_fix_model_network.py | 1 + .../hunze_en_aas/00_review_model_network.py | 1 + notebooks/ijsselmeermodel/basins.py | 1 + notebooks/ijsselmeermodel/kunstwerken.py | 1 + notebooks/ijsselmeermodel/netwerk.py | 3 +- notebooks/koppelen/01_bergend_gebied.py | 3 +- notebooks/koppelen/02_koppelen.py | 3 +- notebooks/limburg/00_review_model_network.py | 1 + notebooks/modelkwaliteit.py | 1 + .../00_review_model_network.py | 1 + .../rijkswaterstaat/10_analyse_resultaten.py | 1 + 
.../10_analyse_resultaten_IWP.py | 1 + notebooks/rijkswaterstaat/1_bathymetrie.py | 3 +- notebooks/rijkswaterstaat/2_basins.py | 3 +- notebooks/rijkswaterstaat/3_netwerk.py | 3 +- notebooks/rijkswaterstaat/4_kunstwerken.py | 1 + notebooks/rijkswaterstaat/5_model_netwerk.py | 3 +- notebooks/rijkswaterstaat/6_model_sturing.py | 1 + .../rijkswaterstaat/7_model_onttrekkingen.py | 3 +- notebooks/rijkswaterstaat/8a_update_bc.py | 1 + notebooks/rijkswaterstaat/8b_update_state.py | 1 + .../rijn_en_ijssel/00_review_model_network.py | 1 + .../rijn_en_ijssel/01_fix_model_network.py | 1 + notebooks/samenvoegen_modellen.py | 1 + notebooks/samenvoegen_overig.py | 3 +- .../00_review_model_network.py | 1 + notebooks/uitlaten_inlaten.ipynb | 3 +- .../00_review_model_network.py | 1 + .../vallei_en_veluwe/01_fix_model_network.py | 3 +- .../vechtstromen/00_review_model_network.py | 1 + .../vechtstromen/01_fix_model_network.py | 5 +- open-vscode-dev.bat | 2 + open-vscode.bat | 3 +- pixi.lock | 3870 +++++++++-------- .../hydamo_0_analyse_data_waterboard.ipynb | 5 +- .../run_ribasim_lumping_waterboard.py | 1 + .../xxxx_combine_waterschap_layers.ipynb | 1 + .../01_parse_crossings.ipynb | 209 - src/peilbeheerst_model/01_parse_crossings.py | 128 + .../01_test_parse_crossings.ipynb | 1 + .../01b_ad_krw_to_peilgebieden.ipynb | 7 +- .../AmstelGooienVecht_parametrize.ipynb | 3 +- .../01_shortest_path_Hollandse_Delta.ipynb | 8 +- .../Shortest_path/02_shortest_path_HHSK.ipynb | 6 +- .../Shortest_path/03_shortest_path_HHNK.ipynb | 6 +- .../04_shortest_path_Delfland.ipynb | 6 +- .../05_shortest_path_Scheldestromen.ipynb | 6 +- .../06_shortest_path_Zuiderzeeland.ipynb | 6 +- .../Shortest_path/07_shortest_path_WSRL.ipynb | 6 +- .../08_shortest_path_Wetterskip.ipynb | 6 +- .../09_shortest_path_Rijnland.ipynb | 6 +- .../peilbeheerst_model/__init__.py | 4 +- .../crossings_to_ribasim.py | 3 +- .../postprocess_data/post-process_agv.ipynb | 453 -- .../postprocess_data/post-process_agv.py | 232 + 
.../preprocess_data/AmstelGooienVecht.ipynb | 5 +- .../ribasim_parametrization.py | 3 +- .../waterschappen.py} | 187 +- src/ribasim_nl/reset_index.py | 1 + src/ribasim_nl/tests/test_cloud.py | 1 + src/ribasim_nl/tests/test_network.py | 3 +- src/ribasim_nl/tests/test_tables.py | 1 + stash/5_model_netwerk_old.py | 1 + stash/5b_upgrade_to_main.py | 1 + stash/6_model_sturing copy.py | 1 + web_app/main.py | 1 + 80 files changed, 2553 insertions(+), 2715 deletions(-) create mode 100644 open-vscode-dev.bat delete mode 100644 src/peilbeheerst_model/01_parse_crossings.ipynb create mode 100644 src/peilbeheerst_model/01_parse_crossings.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py rename src/peilbeheerst_model/{waterschappen.json => peilbeheerst_model/waterschappen.py} (76%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4b2fef2..dc5a4fe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-added-large-files - id: check-case-conflict @@ -14,7 +14,7 @@ repos: exclude: '.teamcity' - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.5 + rev: v0.6.9 hooks: - id: ruff types_or: [python, pyi, jupyter] diff --git a/docs/workflow/level-controlled.qmd b/docs/workflow/level-controlled.qmd index e27870c..50e58d4 100644 --- a/docs/workflow/level-controlled.qmd +++ b/docs/workflow/level-controlled.qmd @@ -8,7 +8,7 @@ The paths below are relative to this path. 1. Run the preprocessing notebooks. One notebook per water board, path: `peilbeheerst_model/preprocess_data/` 2. Run the postprocessing notebook. One notebook per water board, path: `peilbeheerst_model/postprocess_data/` -3. Run the crossings notebook. 
One notebook, path: `01_test_parse_crossings.ipynb` +3. Run the crossings notebook. One notebook, path: `01_parse_crossings.ipynb` 4. Run shortest paths notebooks. One notebook per water board, path: `Shortest_path/` 5. Run crossings to Ribasim notebook. One notebook, all water boards are below each other, path: `02_crossings_to_ribasim_notebook.ipynb` 6. Run parametrize notebooks. One notebook per water board, for now only Amstel, Gooi en Vecht (AGV), path: `Parametrize/AmstelGooienVecht_parametrize.ipynb` diff --git a/notebooks/aa_en_maas/00_review_model_network.py b/notebooks/aa_en_maas/00_review_model_network.py index b408d40..d85221b 100644 --- a/notebooks/aa_en_maas/00_review_model_network.py +++ b/notebooks/aa_en_maas/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/aa_en_maas/01_fix_model_network.py b/notebooks/aa_en_maas/01_fix_model_network.py index f5b9270..5d3307c 100644 --- a/notebooks/aa_en_maas/01_fix_model_network.py +++ b/notebooks/aa_en_maas/01_fix_model_network.py @@ -4,6 +4,7 @@ import pandas as pd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/afleiding_hoofdwateren/klassificatie_waterlijnen.py b/notebooks/afleiding_hoofdwateren/klassificatie_waterlijnen.py index 59581fa..5ddf0e2 100644 --- a/notebooks/afleiding_hoofdwateren/klassificatie_waterlijnen.py +++ b/notebooks/afleiding_hoofdwateren/klassificatie_waterlijnen.py @@ -1,6 +1,7 @@ # %% import geopandas as gpd import pandas as pd + from ribasim_nl import CloudStorage cloud = CloudStorage() diff --git a/notebooks/afleiding_hoofdwateren/nationaal_hoofdwater.py b/notebooks/afleiding_hoofdwateren/nationaal_hoofdwater.py index dd3a0b4..e9fe1e1 100644 --- 
a/notebooks/afleiding_hoofdwateren/nationaal_hoofdwater.py +++ b/notebooks/afleiding_hoofdwateren/nationaal_hoofdwater.py @@ -1,9 +1,10 @@ # %% import geopandas as gpd import pandas as pd -from ribasim_nl import CloudStorage from shapely.geometry import MultiPolygon +from ribasim_nl import CloudStorage + """ General method used by Kadaster: - dissolve top10NL layer to naam to an internal (not shared) layer diff --git a/notebooks/afleiding_hoofdwateren/oppervlaktewater_belgie.py b/notebooks/afleiding_hoofdwateren/oppervlaktewater_belgie.py index 1436800..241656b 100644 --- a/notebooks/afleiding_hoofdwateren/oppervlaktewater_belgie.py +++ b/notebooks/afleiding_hoofdwateren/oppervlaktewater_belgie.py @@ -1,6 +1,7 @@ # %% import geopandas as gpd import pandas as pd + from ribasim_nl import CloudStorage cloud = CloudStorage() diff --git a/notebooks/basin_area_nodes.py b/notebooks/basin_area_nodes.py index 9f809b0..403b0d0 100644 --- a/notebooks/basin_area_nodes.py +++ b/notebooks/basin_area_nodes.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model cloud = CloudStorage() diff --git a/notebooks/brabantse_delta/00_review_model_network.py b/notebooks/brabantse_delta/00_review_model_network.py index d20b24b..835315d 100644 --- a/notebooks/brabantse_delta/00_review_model_network.py +++ b/notebooks/brabantse_delta/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/de_dommel/01_fix_model_network.py b/notebooks/de_dommel/01_fix_model_network.py index d2ff224..ad31018 100644 --- a/notebooks/de_dommel/01_fix_model_network.py +++ b/notebooks/de_dommel/01_fix_model_network.py @@ -3,9 +3,10 @@ import geopandas as gpd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet -from ribasim_nl import CloudStorage, Model, NetworkValidator from shapely.geometry import Point, Polygon 
+from ribasim_nl import CloudStorage, Model, NetworkValidator + cloud = CloudStorage() ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_2024_6_3", "model.toml") diff --git a/notebooks/de_dommel/02_fix_edges.py b/notebooks/de_dommel/02_fix_edges.py index 3667b14..905c961 100644 --- a/notebooks/de_dommel/02_fix_edges.py +++ b/notebooks/de_dommel/02_fix_edges.py @@ -1,5 +1,6 @@ # %% import geopandas as gpd + from ribasim_nl import CloudStorage, Model, Network cloud = CloudStorage() diff --git a/notebooks/de_dommel/03_fix_basin_area.py b/notebooks/de_dommel/03_fix_basin_area.py index 50a1636..4a56bd6 100644 --- a/notebooks/de_dommel/03_fix_basin_area.py +++ b/notebooks/de_dommel/03_fix_basin_area.py @@ -1,9 +1,10 @@ # %% import geopandas as gpd import pandas as pd -from ribasim_nl import CloudStorage, Model from shapely.geometry import MultiPolygon +from ribasim_nl import CloudStorage, Model + cloud = CloudStorage() diff --git a/notebooks/de_dommel/04_parameterize_model.py b/notebooks/de_dommel/04_parameterize_model.py index 2071205..c966188 100644 --- a/notebooks/de_dommel/04_parameterize_model.py +++ b/notebooks/de_dommel/04_parameterize_model.py @@ -2,9 +2,10 @@ import geopandas as gpd import pandas as pd from ribasim.nodes import manning_resistance, pump +from shapely.geometry import MultiLineString + from ribasim_nl import CloudStorage, Model from ribasim_nl.structure_node import get_outlet, get_tabulated_rating_curve -from shapely.geometry import MultiLineString PROFIEL_ID_COLUMN = "PROFIELLIJNID" PROFIEL_LINE_ID_COLUMN = "profiel_id" diff --git a/notebooks/de_dommel/05_add_berging.py b/notebooks/de_dommel/05_add_berging.py index bcd8f43..2124b86 100644 --- a/notebooks/de_dommel/05_add_berging.py +++ b/notebooks/de_dommel/05_add_berging.py @@ -4,10 +4,11 @@ import pandas as pd from ribasim import Node from ribasim.nodes import basin +from shapely.geometry import LineString + from ribasim_nl import CloudStorage, Model from ribasim_nl.berging import 
add_basin_statistics, get_basin_profile, get_rating_curve from ribasim_nl.geometry import basin_to_point -from shapely.geometry import LineString cloud = CloudStorage() diff --git a/notebooks/drents_overijsselse_delta/01_fix_model_network.py b/notebooks/drents_overijsselse_delta/01_fix_model_network.py index fd1aa83..22dde82 100644 --- a/notebooks/drents_overijsselse_delta/01_fix_model_network.py +++ b/notebooks/drents_overijsselse_delta/01_fix_model_network.py @@ -4,6 +4,7 @@ import pandas as pd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet + from ribasim_nl import CloudStorage, Model, NetworkValidator from ribasim_nl.geometry import split_basin_multi_polygon diff --git a/notebooks/hunze_en_aas/00_review_model_network.py b/notebooks/hunze_en_aas/00_review_model_network.py index 2640da1..7531165 100644 --- a/notebooks/hunze_en_aas/00_review_model_network.py +++ b/notebooks/hunze_en_aas/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/ijsselmeermodel/basins.py b/notebooks/ijsselmeermodel/basins.py index 164cdda..7e0ea90 100644 --- a/notebooks/ijsselmeermodel/basins.py +++ b/notebooks/ijsselmeermodel/basins.py @@ -4,6 +4,7 @@ import geopandas as gpd import pandas as pd + from hydamo import code_utils from ribasim_nl.utils.geometry import cut_basin, drop_z from ribasim_nl.utils.geoseries import basins_to_points diff --git a/notebooks/ijsselmeermodel/kunstwerken.py b/notebooks/ijsselmeermodel/kunstwerken.py index 4b0ddfc..49f2662 100644 --- a/notebooks/ijsselmeermodel/kunstwerken.py +++ b/notebooks/ijsselmeermodel/kunstwerken.py @@ -4,6 +4,7 @@ import geopandas as gpd import pandas as pd + from hydamo import code_utils DATA_DIR = Path(os.getenv("RIBASIM_NL_DATA_DIR")) diff --git a/notebooks/ijsselmeermodel/netwerk.py b/notebooks/ijsselmeermodel/netwerk.py index 39a13b6..d6f9484 100644 --- 
a/notebooks/ijsselmeermodel/netwerk.py +++ b/notebooks/ijsselmeermodel/netwerk.py @@ -5,10 +5,11 @@ import geopandas as gpd import networkx as nx import pandas as pd -from hydamo import HyDAMO from shapely.geometry import LineString from shapely.ops import snap, split +from hydamo import HyDAMO + DATA_DIR = Path(os.getenv("RIBASIM_NL_DATA_DIR")) MODEL_DIR = Path(os.getenv("RIBASIM_NL_MODEL_DIR")) / "ijsselmeer" MODEL_DATA_GPKG = Path(MODEL_DIR) / "model_data.gpkg" diff --git a/notebooks/koppelen/01_bergend_gebied.py b/notebooks/koppelen/01_bergend_gebied.py index 8038159..1ccc0a8 100644 --- a/notebooks/koppelen/01_bergend_gebied.py +++ b/notebooks/koppelen/01_bergend_gebied.py @@ -3,11 +3,12 @@ import pandas as pd from ribasim import Node from ribasim.nodes import basin +from shapely.geometry import LineString, MultiPolygon + from ribasim_nl import CloudStorage, Model from ribasim_nl.berging import add_basin_statistics, get_basin_profile, get_rating_curve from ribasim_nl.geodataframe import split_basins from ribasim_nl.geometry import basin_to_point -from shapely.geometry import LineString, MultiPolygon cloud = CloudStorage() diff --git a/notebooks/koppelen/02_koppelen.py b/notebooks/koppelen/02_koppelen.py index 08568f3..043a8f8 100644 --- a/notebooks/koppelen/02_koppelen.py +++ b/notebooks/koppelen/02_koppelen.py @@ -1,9 +1,10 @@ # %% from networkx import NetworkXNoPath +from shapely.geometry import LineString + from ribasim_nl import CloudStorage, Model, Network, reset_index from ribasim_nl.case_conversions import pascal_to_snake_case from ribasim_nl.concat import concat -from shapely.geometry import LineString cloud = CloudStorage() diff --git a/notebooks/limburg/00_review_model_network.py b/notebooks/limburg/00_review_model_network.py index 2fc14ff..a2220d0 100644 --- a/notebooks/limburg/00_review_model_network.py +++ b/notebooks/limburg/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, 
NetworkValidator cloud = CloudStorage() diff --git a/notebooks/modelkwaliteit.py b/notebooks/modelkwaliteit.py index a975a12..871f169 100644 --- a/notebooks/modelkwaliteit.py +++ b/notebooks/modelkwaliteit.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model cloud = CloudStorage() diff --git a/notebooks/noorderzijlvest/00_review_model_network.py b/notebooks/noorderzijlvest/00_review_model_network.py index ab05e94..d340e86 100644 --- a/notebooks/noorderzijlvest/00_review_model_network.py +++ b/notebooks/noorderzijlvest/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/rijkswaterstaat/10_analyse_resultaten.py b/notebooks/rijkswaterstaat/10_analyse_resultaten.py index 0675b31..fe1ba89 100644 --- a/notebooks/rijkswaterstaat/10_analyse_resultaten.py +++ b/notebooks/rijkswaterstaat/10_analyse_resultaten.py @@ -3,6 +3,7 @@ import pandas as pd import ribasim + from ribasim_nl import CloudStorage cloud = CloudStorage() diff --git a/notebooks/rijkswaterstaat/10_analyse_resultaten_IWP.py b/notebooks/rijkswaterstaat/10_analyse_resultaten_IWP.py index 7cf17b3..51bbe49 100644 --- a/notebooks/rijkswaterstaat/10_analyse_resultaten_IWP.py +++ b/notebooks/rijkswaterstaat/10_analyse_resultaten_IWP.py @@ -6,6 +6,7 @@ import pandas as pd import requests import ribasim + from ribasim_nl import CloudStorage # %% Nieuwe excel inlezen diff --git a/notebooks/rijkswaterstaat/1_bathymetrie.py b/notebooks/rijkswaterstaat/1_bathymetrie.py index 2d78546..344f48e 100644 --- a/notebooks/rijkswaterstaat/1_bathymetrie.py +++ b/notebooks/rijkswaterstaat/1_bathymetrie.py @@ -13,9 +13,10 @@ from rasterio.enums import Resampling from rasterio.transform import from_origin from rasterio.windows import from_bounds -from ribasim_nl import CloudStorage from shapely.geometry import MultiPolygon, box +from ribasim_nl import CloudStorage + cloud = 
CloudStorage() diff --git a/notebooks/rijkswaterstaat/2_basins.py b/notebooks/rijkswaterstaat/2_basins.py index 1d51eac..66f63fd 100644 --- a/notebooks/rijkswaterstaat/2_basins.py +++ b/notebooks/rijkswaterstaat/2_basins.py @@ -1,10 +1,11 @@ # %% import geopandas as gpd import pandas as pd +from shapely.geometry import MultiLineString, MultiPolygon, Polygon + from ribasim_nl import CloudStorage from ribasim_nl.geodataframe import split_basins from ribasim_nl.raster import sample_level_area -from shapely.geometry import MultiLineString, MultiPolygon, Polygon cloud = CloudStorage() diff --git a/notebooks/rijkswaterstaat/3_netwerk.py b/notebooks/rijkswaterstaat/3_netwerk.py index 290422f..040db0d 100644 --- a/notebooks/rijkswaterstaat/3_netwerk.py +++ b/notebooks/rijkswaterstaat/3_netwerk.py @@ -2,9 +2,10 @@ import geopandas as gpd import numpy as np import pandas as pd -from ribasim_nl import CloudStorage, Network from shapely.geometry import LineString, Point +from ribasim_nl import CloudStorage, Network + cloud = CloudStorage() # %% read files diff --git a/notebooks/rijkswaterstaat/4_kunstwerken.py b/notebooks/rijkswaterstaat/4_kunstwerken.py index f4aba96..fb73166 100644 --- a/notebooks/rijkswaterstaat/4_kunstwerken.py +++ b/notebooks/rijkswaterstaat/4_kunstwerken.py @@ -8,6 +8,7 @@ import geopandas as gpd import pandas as pd + from ribasim_nl import CloudStorage cloud = CloudStorage() diff --git a/notebooks/rijkswaterstaat/5_model_netwerk.py b/notebooks/rijkswaterstaat/5_model_netwerk.py index 0823490..9ea2ac6 100644 --- a/notebooks/rijkswaterstaat/5_model_netwerk.py +++ b/notebooks/rijkswaterstaat/5_model_netwerk.py @@ -16,10 +16,11 @@ pump, tabulated_rating_curve, ) +from shapely.geometry import LineString, MultiLineString + from ribasim_nl import CloudStorage, Model, Network from ribasim_nl import discrete_control as dc from ribasim_nl.case_conversions import pascal_to_snake_case -from shapely.geometry import LineString, MultiLineString 
warnings.filterwarnings( action="ignore", diff --git a/notebooks/rijkswaterstaat/6_model_sturing.py b/notebooks/rijkswaterstaat/6_model_sturing.py index 4c10772..4149465 100644 --- a/notebooks/rijkswaterstaat/6_model_sturing.py +++ b/notebooks/rijkswaterstaat/6_model_sturing.py @@ -10,6 +10,7 @@ pump, tabulated_rating_curve, ) + from ribasim_nl import CloudStorage, Model from ribasim_nl import discrete_control as dc diff --git a/notebooks/rijkswaterstaat/7_model_onttrekkingen.py b/notebooks/rijkswaterstaat/7_model_onttrekkingen.py index 470726e..0ab3e9a 100644 --- a/notebooks/rijkswaterstaat/7_model_onttrekkingen.py +++ b/notebooks/rijkswaterstaat/7_model_onttrekkingen.py @@ -2,9 +2,10 @@ import geopandas as gpd from ribasim import Node from ribasim.nodes import user_demand -from ribasim_nl import CloudStorage, Model, Network from shapely.geometry import LineString, Point +from ribasim_nl import CloudStorage, Model, Network + def add_demand( model: Model, diff --git a/notebooks/rijkswaterstaat/8a_update_bc.py b/notebooks/rijkswaterstaat/8a_update_bc.py index 6f03179..f91172f 100644 --- a/notebooks/rijkswaterstaat/8a_update_bc.py +++ b/notebooks/rijkswaterstaat/8a_update_bc.py @@ -5,6 +5,7 @@ import pandas as pd from ribasim import Model from ribasim.nodes import flow_boundary + from ribasim_nl import CloudStorage cloud = CloudStorage() diff --git a/notebooks/rijkswaterstaat/8b_update_state.py b/notebooks/rijkswaterstaat/8b_update_state.py index f6aaa5f..fbc009d 100644 --- a/notebooks/rijkswaterstaat/8b_update_state.py +++ b/notebooks/rijkswaterstaat/8b_update_state.py @@ -1,6 +1,7 @@ # %% import pandas as pd from ribasim import Model + from ribasim_nl import CloudStorage cloud = CloudStorage() diff --git a/notebooks/rijn_en_ijssel/00_review_model_network.py b/notebooks/rijn_en_ijssel/00_review_model_network.py index b15fbd3..0a96828 100644 --- a/notebooks/rijn_en_ijssel/00_review_model_network.py +++ b/notebooks/rijn_en_ijssel/00_review_model_network.py @@ -1,6 
+1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/rijn_en_ijssel/01_fix_model_network.py b/notebooks/rijn_en_ijssel/01_fix_model_network.py index 7d81d75..f25952f 100644 --- a/notebooks/rijn_en_ijssel/01_fix_model_network.py +++ b/notebooks/rijn_en_ijssel/01_fix_model_network.py @@ -4,6 +4,7 @@ import pandas as pd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/samenvoegen_modellen.py b/notebooks/samenvoegen_modellen.py index 158d057..1813426 100644 --- a/notebooks/samenvoegen_modellen.py +++ b/notebooks/samenvoegen_modellen.py @@ -3,6 +3,7 @@ import numpy as np import ribasim + from ribasim_nl import CloudStorage from ribasim_nl.case_conversions import pascal_to_snake_case from ribasim_nl.concat import concat diff --git a/notebooks/samenvoegen_overig.py b/notebooks/samenvoegen_overig.py index 836555f..142d262 100644 --- a/notebooks/samenvoegen_overig.py +++ b/notebooks/samenvoegen_overig.py @@ -1,9 +1,10 @@ # %% import geopandas as gpd import pandas as pd +from shapely.geometry import MultiPolygon + from ribasim_nl import CloudStorage from ribasim_nl.geometry import drop_z -from shapely.geometry import MultiPolygon # %% cloud = CloudStorage() diff --git a/notebooks/stichtse_rijnlanden/00_review_model_network.py b/notebooks/stichtse_rijnlanden/00_review_model_network.py index 90bc46e..b6ccaa6 100644 --- a/notebooks/stichtse_rijnlanden/00_review_model_network.py +++ b/notebooks/stichtse_rijnlanden/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/uitlaten_inlaten.ipynb b/notebooks/uitlaten_inlaten.ipynb index 60433f5..c914db2 100644 --- a/notebooks/uitlaten_inlaten.ipynb +++ 
b/notebooks/uitlaten_inlaten.ipynb @@ -14,9 +14,10 @@ "import geopandas as gpd\n", "import pandas as pd\n", "import requests\n", - "from hydamo import HyDAMO, code_utils\n", "from shapely.geometry import Point\n", "\n", + "from hydamo import HyDAMO, code_utils\n", + "\n", "warnings.simplefilter(\"ignore\", UserWarning)" ] }, diff --git a/notebooks/vallei_en_veluwe/00_review_model_network.py b/notebooks/vallei_en_veluwe/00_review_model_network.py index 0b9f470..2bfcf93 100644 --- a/notebooks/vallei_en_veluwe/00_review_model_network.py +++ b/notebooks/vallei_en_veluwe/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/vallei_en_veluwe/01_fix_model_network.py b/notebooks/vallei_en_veluwe/01_fix_model_network.py index 77a0ba0..0fe3f28 100644 --- a/notebooks/vallei_en_veluwe/01_fix_model_network.py +++ b/notebooks/vallei_en_veluwe/01_fix_model_network.py @@ -4,9 +4,10 @@ import pandas as pd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet -from ribasim_nl import CloudStorage, Model, NetworkValidator from shapely.geometry import MultiPolygon +from ribasim_nl import CloudStorage, Model, NetworkValidator + cloud = CloudStorage() authority = "ValleienVeluwe" diff --git a/notebooks/vechtstromen/00_review_model_network.py b/notebooks/vechtstromen/00_review_model_network.py index bab28cc..407b85d 100644 --- a/notebooks/vechtstromen/00_review_model_network.py +++ b/notebooks/vechtstromen/00_review_model_network.py @@ -1,6 +1,7 @@ # %% import pandas as pd + from ribasim_nl import CloudStorage, Model, NetworkValidator cloud = CloudStorage() diff --git a/notebooks/vechtstromen/01_fix_model_network.py b/notebooks/vechtstromen/01_fix_model_network.py index 7d94912..cdbfe5e 100644 --- a/notebooks/vechtstromen/01_fix_model_network.py +++ b/notebooks/vechtstromen/01_fix_model_network.py @@ -4,11 +4,12 @@ import 
pandas as pd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet -from ribasim_nl import CloudStorage, Model, NetworkValidator -from ribasim_nl.geometry import edge, split_basin, split_basin_multi_polygon from shapely.geometry import LineString, MultiPolygon, Point, Polygon from shapely.ops import nearest_points +from ribasim_nl import CloudStorage, Model, NetworkValidator +from ribasim_nl.geometry import edge, split_basin, split_basin_multi_polygon + cloud = CloudStorage() ribasim_toml = cloud.joinpath("Vechtstromen", "modellen", "Vechtstromen_2024_6_3", "vechtstromen.toml") diff --git a/open-vscode-dev.bat b/open-vscode-dev.bat new file mode 100644 index 0000000..0c2e0a4 --- /dev/null +++ b/open-vscode-dev.bat @@ -0,0 +1,2 @@ +REM open using ribasim python main branch +pixi run --environment=dev code . | exit diff --git a/open-vscode.bat b/open-vscode.bat index ae86837..221f377 100644 --- a/open-vscode.bat +++ b/open-vscode.bat @@ -1 +1,2 @@ -pixi run --environment=dev code . | exit +REM open using ribasim python release +pixi run code . 
| exit diff --git a/pixi.lock b/pixi.lock index 2a4e83e..1188e17 100644 --- a/pixi.lock +++ b/pixi.lock @@ -12,45 +12,45 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.12-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-h57bd9a3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h29ce20c_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h5e77a74_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h4e6ae90_11.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-h02abb05_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.6-h834ce55_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h756ea98_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-h3e6eb3e_6.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h9f1560d_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.9.0-hd126650_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h1d30c4a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-ha3822c6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-h0f25b8a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-he1a10d6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hae4d56a_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.29-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h2bff981_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h19b0707_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h14a7884_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.19-hc9e6898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-hb8d5873_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.7-h666547d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h2bff981_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h2bff981_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-hbe26082_8.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h25d6d5c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/black-24.10.0-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.1-heb4867d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -59,32 +59,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-ha728647_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.2-py312h66e93f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.3-py312h178313f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dart-sass-1.58.3-ha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.6-py312h2ec8cdc_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.7-py312h2ec8cdc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-1.41.0-hfc7925d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-dom-0.1.35-hd9586b0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.24.0-ha770c72_0.conda @@ -121,7 +121,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda @@ -170,17 +170,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h364f349_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_20_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-ha07344c_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.1-default_hb5137d0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.1-default_h9c6a7e4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.2-default_hb5137d0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.2-default_h9c6a7e4_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda @@ -192,8 +192,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hd5b9bfb_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda @@ -208,42 +208,42 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.1-h2ff4ddf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.29.0-h438788a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.29.0-h0121fbd_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.30.0-h438788a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.30.0-h0121fbd_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.65.5-hf5c653b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.1-ha7bfdaf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.2-ha7bfdaf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_22_cpu.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.27.5-h5b01275_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.11.01-hbbce691_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2024.07.02-hbbce691_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_11.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2 @@ -263,7 +263,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.1-py312h178313f_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -274,7 +274,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.12.0-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda @@ -283,13 +283,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.105-hd34e28f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.2-py312h58c1407_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda @@ -307,10 +307,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h56024de_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda @@ -320,26 +321,25 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.4-py312h12e396e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.10.0-py312he8b4914_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.3-py312h91f0f75_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.8.0-py312h91f0f75_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda @@ -352,14 +352,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.8.0-h6e8976b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.57-ha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.4.1-py312h8456570_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.11.01-h77b4e00_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h77b4e00_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda @@ -370,9 +370,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.9-py312hd18ad41_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.4-h1380c3d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.5-h3931f03_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda @@ -391,7 +391,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hedb9d39_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hcd2843e_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 @@ -403,7 +403,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20241016-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda @@ -415,7 +415,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.3-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda @@ -447,7 +447,7 @@ environments: - 
conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda @@ -467,46 +467,46 @@ environments: osx-64: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312hb553811_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - 
conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hb28a666_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hcd1ed9e_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-h2f86973_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_11.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h9d7d61c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.6-hd01826e_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h8128ea2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-hef75ebe_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-h2e282c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.13.0-hf8dbe3c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.9.0-h60298e3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h2259716_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-hdeff353_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h2123174_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hd993245_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h40772b6_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.29-ha44c9a9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h40772b6_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-ha4d96b0_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-he927b17_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.19-h99e8e40_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h3b26a6a_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.7-hdec1378_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h40772b6_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h40772b6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-h8479783_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-hf1b494f_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.14.0-h9a36307_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.10.0-ha4e2ba9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h3d2f5f1_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-h1ccc5ac_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h86941f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/black-24.10.0-py312hb401068_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda 
- conda: https://conda.anaconda.org/conda-forge/osx-64/blosc-1.21.6-h7d75f6d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312h5861a67_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.1-h44e7173_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.2-h32b1619_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -515,31 +515,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.1-py312hf857d28_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-ha105788_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-h47b6969_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.2-py312h3d0f464_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.3-py312hbe3f5e4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cyrus-sasl-2.1.27-hf9bab2b_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-1.0.0-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/dart-sass-1.58.3-h694c41f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.6-py312h5861a67_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.7-py312h5861a67_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-1.41.0-h86af993_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-dom-0.1.35-h08cba0f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.24.0-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda @@ -574,7 +574,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hac325c4_1005.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/giflib-5.2.2-h10d778d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/glog-0.7.1-h2790a97_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf4-4.2.15-h8138101_7.conda @@ -620,10 +620,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240722.0-cxx17_hac325c4_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libaec-1.1.3-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.4-h20e244c_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h74c0fbd_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h3475b9b_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h00291cd_2.conda @@ -631,7 +631,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libcblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcrc32c-1.1.2-he49afe7_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.1-h58e7537_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.1-hf95d169_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.2-hf95d169_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.22-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda @@ -654,9 +654,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_7.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-13_2_0_h97931a8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-13.2.0-h2873a65_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.1-h63bbcf2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.29.0-hade041e_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.29.0-h8126ed0_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.2-hb6ef654_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.30.0-hade041e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.30.0-h8126ed0_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.65.5-hb88832f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.17-hd75f5a5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.22.5-hdfe23c8_3.conda @@ -668,11 +668,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libntlm-1.4-h0d85af4_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libopenblas-0.3.27-openmp_h8869122_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.44-h4b8f8c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-h6e894e8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-ha324e28_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-5.27.5-h62b0dff_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.11.01-hd530cb8_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2024.07.02-hd530cb8_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-hdfb80b9_17.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.20-hfdf4475_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hc43c327_11.conda @@ -687,7 +687,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libxslt-1.1.39-h03b04e6_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.11.1-h3116616_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.1-h545e0da_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.2-hf78d878_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hcc8fd36_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h83408cd_1.conda @@ -695,7 +695,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/lzo-2.10-h10d778d_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.1-py312hca98d7e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.2-py312hbe3f5e4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h30cc4df_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -706,20 +706,20 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.1.0-py312hc5c4d5f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.2-py312hb553811_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.12.0-py312h3d0f464_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nss-3.105-h3135457_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py312hc3b515d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.2-py312he4d506f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/openjpeg-2.5.2-h7310d3a_0.conda @@ -737,10 +737,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pcre2-10.44-h7634a1b_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312h683ea77_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-11.0.0-py312h66fe14f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.43.4-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda @@ -750,18 +751,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h65860a0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h13864a0_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h88d1a9c_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.5.0-h70d2bda_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312h3d0f464_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-h00291cd_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-17.0.0-py312h0be7463_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-core-17.0.0-py312h63b501a_1_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.4-py312h669792a_0.conda @@ -769,7 +769,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312hab44e94_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312hab44e94_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.10.0-py312h8f0a83f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.7.0-py312h9673cc4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda @@ -783,13 +783,13 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h1060d5c_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/qhull-2020.2-h3c5361c_5.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.57-h694c41f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.4.1-py312h89b8ddc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.11.01-h2fb0a26_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2024.07.02-h2fb0a26_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda @@ -801,7 +801,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312h669792a_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.9-py312he6c0bb9_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.2-py312h9d777eb_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312h888eae2_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h4ff98d2_2.conda @@ -820,7 +820,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-h1b23fdf_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-hcef368d_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 @@ -832,7 +832,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20241016-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda @@ -844,7 +844,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/uriparser-0.9.8-h6aefe2f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.3-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda @@ -875,44 +875,44 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hce3b56f_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hd0ca3c1_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-heca9ddf_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_11.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-hf27581b_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.6-h56e9fbd_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-hf1fc857_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h26f7782_6.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h25dd3c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.13.0-haf5610f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.9.0-h148e6f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-ha77ad07_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-h148e6f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hdb8b6b4_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-h891f644_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.29-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-h891f644_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-h9d4aeba_4.conda + - 
conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-h4d3606f_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.19-h5c5bb51_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-h98e454b_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.7-h23cec54_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-h891f644_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-h891f644_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h97eb5cb_8.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h1b410ea_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.14.0-haf5610f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.10.0-hd6deed7_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-h3241184_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-hd6deed7_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/black-24.10.0-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-h85f69ea_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.1-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.2-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -921,31 +921,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-hc2ea260_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-h5bc515d_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.2-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.3-py312h31fea79_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.7-py312hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-1.0.0-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/dart-sass-1.58.3-h57928b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.6-py312h275cf98_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.7-py312h275cf98_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/deno-1.41.0-h1f5608b_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/deno-dom-0.1.35-h8b8d39b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda - 
- conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/double-conversion-3.3.0-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.24.0-h57928b3_0.conda @@ -979,7 +979,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.13.0-h5a68840_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h496ac4d_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-9.0.0-h2bedf89_1.conda @@ -1026,23 +1026,23 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240722.0-cxx17_he0c23c2_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.4-haf234dc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h297d146_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_20_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-ha019072_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-24_win64_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.1-default_ha5278ca_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.2-default_ha5278ca_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.1-h1ee3ff0_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.22-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.3-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.1.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.2.0-h1383e82_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h042995d_7.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_7.conda @@ -1057,10 +1057,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-hfaa227e_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.1-h7025463_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.1.0-h1383e82_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.29.0-ha00044d_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.29.0-he5eb982_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.2-h7025463_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.2.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.30.0-ha00044d_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.30.0-he5eb982_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.65.5-ha20e22e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-hcfcfb64_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda @@ -1068,11 +1068,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h92078aa_114.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_22_cpu.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.44-h3ca93ac_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-5.27.5-hcaed137_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.11.01-h4eb7d71_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2024.07.02-h4eb7d71_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-hd4c2148_17.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-h939089a_11.conda @@ -1095,7 +1095,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.1-py312h31fea79_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -1107,17 +1107,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda + 
- conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.12.0-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py312hcccf92d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.2-py312h49bc9c5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.2-h3d672ee_0.conda @@ -1134,9 +1134,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pcre2-10.44-h3d7b363_2.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-11.0.0-py312ha41cd45_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.43.4-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda @@ -1146,25 +1147,24 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h9415970_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.5.0-hd9569ee_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-h0e40799_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-17.0.0-py312h7e22eef_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-core-17.0.0-py312h6a9c419_1_cpu.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.4-py312h2615798_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.10.0-py312h8705084_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.7.0-py312ha24589b_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.3-py312h2ee7485_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.8.0-py312h2ee7485_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda @@ -1176,17 +1176,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-306-py312h53d5487_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-307-py312h275cf98_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.14-py312h275cf98_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/qhull-2020.2-hc790b64_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.3-hfb098fa_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.8.0-hfb098fa_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.57-h57928b3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.4.1-py312h1701b51_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.11.01-hd3b24a8_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2024.07.02-hd3b24a8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 @@ -1197,7 +1197,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.9-py312h881003e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.2-py312h816cc57_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h337df96_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h0c580ee_2.conda @@ -1217,7 +1217,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h34a6a78_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h19c5691_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 @@ -1229,7 +1229,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20241016-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda @@ -1243,7 +1243,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_22.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_22.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_22.conda - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.3-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda @@ -1285,45 +1285,45 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.12-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-h57bd9a3_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h29ce20c_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h5e77a74_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h4e6ae90_11.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-h02abb05_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.6-h834ce55_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h756ea98_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-h3e6eb3e_6.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h9f1560d_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.9.0-hd126650_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h1d30c4a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-ha3822c6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-h0f25b8a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-he1a10d6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hae4d56a_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.29-hb9d3cd8_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h2bff981_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h19b0707_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h14a7884_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.19-hc9e6898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-hb8d5873_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.7-h666547d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h2bff981_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h2bff981_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-hbe26082_8.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h25d6d5c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/black-24.10.0-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.1-heb4867d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -1332,32 +1332,32 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-ha728647_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: 
https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.2-py312h66e93f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.3-py312h178313f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dart-sass-1.58.3-ha770c72_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.6-py312h2ec8cdc_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.7-py312h2ec8cdc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-1.41.0-hfc7925d_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/deno-dom-0.1.35-hd9586b0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/esbuild-0.24.0-ha770c72_0.conda @@ -1394,7 +1394,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda @@ -1443,17 +1443,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h364f349_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-ha07344c_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.1-default_hb5137d0_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.1-default_h9c6a7e4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.2-default_hb5137d0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.2-default_h9c6a7e4_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda @@ -1465,8 +1465,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-hd5b9bfb_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda @@ -1481,42 +1481,42 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.1-h2ff4ddf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.29.0-h438788a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.29.0-h0121fbd_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.30.0-h438788a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.30.0-h0121fbd_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.65.5-hf5c653b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.1-ha7bfdaf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.2-ha7bfdaf_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.27.5-h5b01275_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.11.01-hbbce691_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2024.07.02-hbbce691_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_11.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2 @@ -1536,7 +1536,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.1-py312h178313f_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.9.2-py312h7900ff3_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -1547,7 +1547,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.12.0-py312h66e93f0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda @@ -1556,13 +1556,13 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/nss-3.105-hd34e28f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.2-py312h58c1407_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda @@ -1580,10 +1580,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h56024de_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda @@ -1593,26 +1594,25 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.4-py312h12e396e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyogrio-0.10.0-py312he8b4914_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.3-py312h91f0f75_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.8.0-py312h91f0f75_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda @@ -1625,14 +1625,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.8.0-h6e8976b_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/quarto-1.5.57-ha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.4.1-py312h8456570_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.11.01-h77b4e00_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h77b4e00_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda @@ -1642,9 +1642,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.6.9-py312hd18ad41_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.4-h1380c3d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.5-h3931f03_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda @@ -1663,7 +1663,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hedb9d39_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hcd2843e_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 @@ -1675,7 +1675,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20241016-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda @@ -1687,7 +1687,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda - 
conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-5.0.3-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda @@ -1719,7 +1719,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xugrid-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda @@ -1740,46 +1740,46 @@ environments: osx-64: - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py312hb553811_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hb28a666_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hcd1ed9e_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-h2f86973_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_11.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h9d7d61c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.6-hd01826e_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h8128ea2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-hef75ebe_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-h2e282c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.13.0-hf8dbe3c_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.9.0-h60298e3_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h2259716_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-hdeff353_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h2123174_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hd993245_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h40772b6_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.29-ha44c9a9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h40772b6_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-ha4d96b0_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-he927b17_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.19-h99e8e40_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h3b26a6a_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.7-hdec1378_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h40772b6_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h40772b6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-h8479783_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-hf1b494f_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.14.0-h9a36307_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.10.0-ha4e2ba9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h3d2f5f1_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-h1ccc5ac_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h86941f0_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/black-24.10.0-py312hb401068_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/blosc-1.21.6-h7d75f6d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312h5861a67_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.1-h44e7173_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.2-h32b1619_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -1788,31 +1788,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/cffi-1.17.1-py312hf857d28_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-ha105788_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-h47b6969_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.3.0-py312hc5c4d5f_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.2-py312h3d0f464_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.3-py312hbe3f5e4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cyrus-sasl-2.1.27-hf9bab2b_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cytoolz-1.0.0-py312hb553811_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/dart-sass-1.58.3-h694c41f_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.6-py312h5861a67_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.7-py312h5861a67_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-1.41.0-h86af993_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/deno-dom-0.1.35-h08cba0f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/esbuild-0.24.0-h694c41f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda @@ -1847,7 +1847,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/gflags-2.2.2-hac325c4_1005.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/giflib-5.2.2-h10d778d_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/glog-0.7.1-h2790a97_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.5.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/hdf4-4.2.15-h8138101_7.conda @@ -1893,10 +1893,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240722.0-cxx17_hac325c4_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libaec-1.1.3-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.4-h20e244c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h74c0fbd_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h3475b9b_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h00291cd_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h00291cd_2.conda @@ -1904,7 +1904,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libcblas-3.9.0-22_osx64_openblas.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libcrc32c-1.1.2-he49afe7_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.10.1-h58e7537_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.1-hf95d169_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.2-hf95d169_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.22-h00291cd_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda @@ -1927,9 +1927,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libgdal-xls-3.9.2-hc33d192_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-13_2_0_h97931a8_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-13.2.0-h2873a65_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.1-h63bbcf2_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.29.0-hade041e_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.29.0-h8126ed0_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.2-hb6ef654_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.30.0-hade041e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.30.0-h8126ed0_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libgrpc-1.65.5-hb88832f_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.17-hd75f5a5_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.22.5-hdfe23c8_3.conda @@ -1941,11 +1941,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libntlm-1.4-h0d85af4_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libopenblas-0.3.27-openmp_h8869122_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_20_cpu.conda 
+ - conda: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.44-h4b8f8c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-h6e894e8_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-ha324e28_4.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-5.27.5-h62b0dff_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.11.01-hd530cb8_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2024.07.02-hd530cb8_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/librttopo-1.1.0-hdfb80b9_17.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libsodium-1.0.20-hfdf4475_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libspatialite-5.1.0-hc43c327_11.conda @@ -1960,7 +1960,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libxslt-1.1.39-h03b04e6_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzip-1.11.1-h3116616_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.1-h545e0da_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.2-hf78d878_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/llvmlite-0.43.0-py312hcc8fd36_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/lz4-4.3.3-py312h83408cd_1.conda @@ -1968,7 +1968,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/lzo-2.10-h10d778d_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.1-py312hca98d7e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.2-py312hbe3f5e4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-3.9.2-py312hb401068_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.9.2-py312h30cc4df_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -1979,20 +1979,20 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.1.0-py312hc5c4d5f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.2-py312hb553811_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.12.0-py312h3d0f464_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/nspr-4.35-hea0b92c_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/nss-3.105-h3135457_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numba-0.60.0-py312hc3b515d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.0.2-py312he4d506f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openjpeg-2.5.2-h7310d3a_0.conda @@ -2010,10 +2010,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pcre2-10.44-h7634a1b_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312h683ea77_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-11.0.0-py312h66fe14f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.43.4-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda @@ -2023,18 +2024,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/poppler-24.08.0-h65860a0_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h13864a0_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h88d1a9c_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/proj-9.5.0-h70d2bda_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312h3d0f464_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-h00291cd_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-17.0.0-py312h0be7463_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyarrow-core-17.0.0-py312h63b501a_1_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.23.4-py312h669792a_0.conda @@ -2042,7 +2042,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-10.3.1-py312hab44e94_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-10.3.1-py312hab44e94_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyogrio-0.10.0-py312h8f0a83f_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyproj-3.7.0-py312h9673cc4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda @@ -2056,13 +2056,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py312hb553811_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h1060d5c_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/qhull-2020.2-h3c5361c_5.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/quarto-1.5.57-h694c41f_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/rasterio-1.4.1-py312h89b8ddc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.11.01-h2fb0a26_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/re2-2024.07.02-h2fb0a26_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda @@ -2073,7 +2073,7 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.20.0-py312h669792a_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.6.9-py312he6c0bb9_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.5.2-py312h9d777eb_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312h888eae2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/shapely-2.0.6-py312h4ff98d2_2.conda @@ -2092,7 +2092,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh31c8845_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-h1b23fdf_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-hcef368d_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 @@ -2104,7 +2104,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20241016-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda @@ -2116,7 +2116,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/uriparser-0.9.8-h6aefe2f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-5.0.3-py312hb553811_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda @@ -2148,44 +2148,44 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/_openmp_mutex-4.5-2_gnu.conda - conda: https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py312h4389bb4_5.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hce3b56f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hd0ca3c1_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-heca9ddf_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_11.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-hf27581b_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.6-h56e9fbd_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-hf1fc857_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h26f7782_6.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h25dd3c2_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.13.0-haf5610f_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.9.0-h148e6f0_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-ha77ad07_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-h148e6f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hdb8b6b4_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-h891f644_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.29-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-h891f644_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-h9d4aeba_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-h4d3606f_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.19-h5c5bb51_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-h98e454b_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.7-h23cec54_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-h891f644_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-h891f644_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h97eb5cb_8.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h1b410ea_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.14.0-haf5610f_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.10.0-hd6deed7_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-h3241184_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-hd6deed7_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beartype-0.19.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/win-64/black-24.10.0-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.6-h85f69ea_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h275cf98_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.1-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.2-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 @@ -2194,31 +2194,31 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-hc2ea260_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-h5bc515d_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/contextily-1.6.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/contourpy-1.3.0-py312hd5eb7cc_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.2-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.3-py312h31fea79_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.7-py312hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cytoolz-1.0.0-py312h4389bb4_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/dart-sass-1.58.3-h57928b3_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.6-py312h275cf98_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.7-py312h275cf98_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/deno-1.41.0-h1f5608b_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/deno-dom-0.1.35-h8b8d39b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/double-conversion-3.3.0-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/esbuild-0.24.0-h57928b3_0.conda @@ -2252,7 +2252,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/geos-3.13.0-h5a68840_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.3-h496ac4d_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/graphite2-1.3.13-h63175ca_1003.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/griffe-1.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/harfbuzz-9.0.0-h2bedf89_1.conda @@ -2299,23 +2299,23 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libabseil-20240722.0-cxx17_he0c23c2_1.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libaec-1.1.3-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libarchive-3.7.4-haf234dc_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h297d146_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_20_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-ha019072_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_22_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.1.0-h2466b09_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-24_win64_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.1-default_ha5278ca_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.2-default_ha5278ca_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libcrc32c-1.1.2-h0e60522_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libcurl-8.10.1-h1ee3ff0_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.22-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libevent-2.1.12-h3671451_1.conda - conda: 
https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.3-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.1.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.2.0-h1383e82_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-3.9.2-h57928b3_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-core-3.9.2-h042995d_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-fits-3.9.2-h0a0b71e_7.conda @@ -2330,10 +2330,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-postgisraster-3.9.2-hfaa227e_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-tiledb-3.9.2-hb8b5d01_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgdal-xls-3.9.2-hd0e23a6_7.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.1-h7025463_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.1.0-h1383e82_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.29.0-ha00044d_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.29.0-he5eb982_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.2-h7025463_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.2.0-h1383e82_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.30.0-ha00044d_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.30.0-he5eb982_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libgrpc-1.65.5-ha20e22e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-hcfcfb64_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libintl-0.22.5-h5728263_3.conda @@ -2341,11 +2341,11 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-h538826c_1021.conda - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-24_win64_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.9.2-nompi_h92078aa_114.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_20_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_22_cpu.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.44-h3ca93ac_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_4.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libprotobuf-5.27.5-hcaed137_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.11.01-h4eb7d71_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2024.07.02-h4eb7d71_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-hd4c2148_17.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.1.0-h939089a_11.conda @@ -2368,7 +2368,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/lzo-2.10-hcfcfb64_1001.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.1-py312h31fea79_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.9.2-py312h2e8e312_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.9.2-py312h90004f6_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda @@ -2380,17 +2380,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.1.0-py312hd5eb7cc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/multimethod-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.12.0-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numba-0.60.0-py312hcccf92d_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.0.2-py312h49bc9c5_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/odc-geo-0.4.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.2-h3d672ee_0.conda @@ -2407,9 +2407,10 @@ 
environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pcre2-10.44-h3d7b363_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-11.0.0-py312ha41cd45_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.43.4-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda @@ -2419,25 +2420,24 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/poppler-24.08.0-h9415970_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_4.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/proj-9.5.0-hd9569ee_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_2.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-h0e40799_1002.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-17.0.0-py312h7e22eef_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyarrow-core-17.0.0-py312h6a9c419_1_cpu.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pydantic-core-2.23.4-py312h2615798_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyogrio-0.10.0-py312h8705084_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyproj-3.7.0-py312ha24589b_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.3-py312h2ee7485_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.8.0-py312h2ee7485_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda @@ -2449,17 +2449,17 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-306-py312h53d5487_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-307-py312h275cf98_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.14-py312h275cf98_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py312h4389bb4_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/qhull-2020.2-hc790b64_5.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.3-hfb098fa_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.8.0-hfb098fa_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/quarto-1.5.57-h57928b3_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.8.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/rasterio-1.4.1-py312h1701b51_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rasterstats-0.20.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2023.11.01-hd3b24a8_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/re2-2024.07.02-hd3b24a8_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 @@ -2469,7 +2469,7 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.20.0-py312h2615798_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.6.9-py312h881003e_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.5.2-py312h816cc57_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h337df96_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.6-py312h0c580ee_2.conda @@ -2489,7 +2489,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h34a6a78_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h19c5691_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 @@ -2501,7 +2501,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.2.0.20241003-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20241016-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda @@ -2515,7 +2515,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_22.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_22.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_22.conda - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-5.0.3-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda @@ -2650,14 +2650,13 @@ packages: timestamp: 1716290348421 - kind: conda name: anyio - version: 4.6.0 - build: pyhd8ed1ab_1 - build_number: 1 + version: 4.6.2.post1 + build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.0-pyhd8ed1ab_1.conda - sha256: d05493abca6ac1b0cb15f5d48c3117bddd73cc21e48bfcb460570cfa2ea2f909 - md5: bc13891a047f50728b03595531f7f92e + url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda + sha256: 4b54b7ce79d818e3cce54ae4d552dba51b7afac160ceecdefd04b3917a37c502 + md5: 688697ec5e9588bdded167d19577625b depends: - exceptiongroup >=1.0.2 - idna >=2.8 @@ -2671,8 +2670,8 @@ packages: license_family: MIT purls: - pkg:pypi/anyio?source=hash-mapping - size: 108445 - timestamp: 1726931347728 + size: 109864 + timestamp: 
1728935803440 - kind: conda name: appdirs version: 1.4.4 @@ -2828,23 +2827,6 @@ packages: - pkg:pypi/asttokens?source=hash-mapping size: 28922 timestamp: 1698341257884 -- kind: conda - name: astunparse - version: 1.6.3 - build: pyhd8ed1ab_0 - subdir: noarch - noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - sha256: e5173d1ed038038e24c0623f0219dc587ee8663cf7efa737e7075128edbc6c60 - md5: 000b6f68a0bfaba800ced7500c11780f - depends: - - python >=3.6 - - six >=1.6.1,<2.0 - license: BSD-3-Clause AND PSF-2.0 - purls: - - pkg:pypi/astunparse?source=hash-mapping - size: 15539 - timestamp: 1610696401707 - kind: conda name: async-lru version: 2.0.4 @@ -2883,95 +2865,98 @@ packages: - kind: conda name: aws-c-auth version: 0.7.31 - build: h57bd9a3_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-h57bd9a3_0.conda - sha256: 7706d49b8011da81d5dc54e9bad06f67d43edb1ff2aa1dcc3dbc737d53d2a4ef - md5: 83be3b5e072d88b76841cc02c6dd458e + build: hd993245_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hd993245_2.conda + sha256: c35b066d0ab07bd1ec0a2f731430c57e299dcdc8e59f2cb463862e64c4398fdf + md5: 528d272421b734a981ae041cce3b65a2 depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 107753 - timestamp: 1726544311370 + size: 94546 + timestamp: 1728796353730 - kind: conda name: aws-c-auth version: 0.7.31 - build: hb28a666_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-auth-0.7.31-hb28a666_0.conda - sha256: 51bf3047115f21c89e96999ec7a0c46a7684334ffe5a3584547a2e1f9e14ba2a - md5: 
d14e2cb987740374e14e871456356b76 + build: hdb8b6b4_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hdb8b6b4_2.conda + sha256: dc167ed4a106adc5f7f16b08617f2f9d47ddf13da371ee6afc7154b9dff821e5 + md5: 934891110bd0bc15a2465e9c457be8ba depends: - - __osx >=10.13 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 94230 - timestamp: 1726544392579 + size: 103181 + timestamp: 1728796873698 - kind: conda name: aws-c-auth version: 0.7.31 - build: hce3b56f_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-auth-0.7.31-hce3b56f_0.conda - sha256: b154854dc8b0c66bf7282da5668352a93f8d36e44936f8adb5bdabe519596e69 - md5: 49f9d09893f4356733ea584c1ef088ce + build: he1a10d6_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.31-he1a10d6_2.conda + sha256: 83fa4b24101cd85da825dcbb7611390c2a6e31a3fc17abb4d1ee5b8c40bdaa5a + md5: 76550a294cc78aaccfca7824bb4814ce depends: + - __glibc >=2.17,<3.0.a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 102819 - timestamp: 1726544858712 + size: 107301 + timestamp: 1728796325782 - kind: conda name: aws-c-cal version: 0.7.4 - build: h8128ea2_1 - build_number: 1 + build: h40772b6_2 + build_number: 2 subdir: osx-64 - url: 
https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h8128ea2_1.conda - sha256: 6ffa143181fa40bbbe1b5dfad149b68e4c3fcb6e5d38a4f5a4490c8c3b4402df - md5: 195ef3e2d7dadb02a4b1f874a1e5e1e6 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-cal-0.7.4-h40772b6_2.conda + sha256: 4770ffec772090bbddc354c24bc2e6425d3071b1e09fa589606689fabcaff25f + md5: a5c1b1cb5a03d4a5b67e98df8da31aa1 depends: - __osx >=10.13 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - openssl >=3.3.1,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 39204 - timestamp: 1725829973 + size: 39297 + timestamp: 1728755588333 - kind: conda name: aws-c-cal version: 0.7.4 - build: hf1fc857_1 - build_number: 1 + build: h891f644_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-hf1fc857_1.conda - sha256: f7ea9d52f759775dde2a39e1a2325e4659bfb2859f7a45798323c7cb00ed2770 - md5: 7c01760e07f867666662a4d91e998308 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-cal-0.7.4-h891f644_2.conda + sha256: 1458b30f5a7305c98885ffc0f392b4161ac7459b9500377e662e25c5878c27bc + md5: 728652c109a9ed14aee25d6ca1e33da6 depends: - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - openssl >=3.3.1,<4.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -2979,182 +2964,162 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 46848 - timestamp: 1725830274457 + size: 47297 + timestamp: 1728756066306 - kind: conda name: aws-c-cal version: 0.7.4 - build: hfd43aa1_1 - build_number: 1 + build: hae4d56a_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda - sha256: 8c8100499b7fced0c6a5eea156e85994d3bb0702b30eecedd949d555ca11f6a8 - md5: f301eb944d297fc879c441fffe461d8a + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hae4d56a_2.conda + sha256: 
4bfed63898a1697364ce9621e1fc09c98f143777b0ca60655eb812efa5bf246d + md5: cdc628e4ffb4ffcd476e3847267e1689 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - libgcc >=13 - openssl >=3.3.1,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 47532 - timestamp: 1725829965837 + size: 47181 + timestamp: 1728755555430 - kind: conda name: aws-c-common - version: 0.9.28 - build: h00291cd_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.28-h00291cd_0.conda - sha256: 9af8c4514526829de390bc5f5c103487dff1cd025463ea90b7f8dbb8f1d0ff16 - md5: ffe8898e6d97ecb791df1350ce273508 + version: 0.9.29 + build: h2466b09_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.29-h2466b09_0.conda + sha256: 4d8b757985dccad92608f173ba299a4781ff755670858b92f6bbdb8cf78cf851 + md5: 1a55ee7e7eb4477195128275b8747026 depends: - - __osx >=10.13 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 225877 - timestamp: 1725670122224 + size: 235092 + timestamp: 1728707219604 - kind: conda name: aws-c-common - version: 0.9.28 - build: h2466b09_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-common-0.9.28-h2466b09_0.conda - sha256: 102e955695d4b996753773552820b18b6d0c1f8d77ac0412041341bece100815 - md5: 3ffb0664a913a557bf89ed1834d0c12c + version: 0.9.29 + build: ha44c9a9_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-common-0.9.29-ha44c9a9_0.conda + sha256: 2a1f37f67fabac89ef9f4f9e105c33993cab22edb94801d03555a5ab44b9c557 + md5: 51d626987f9327896b2e3ac2d36f2163 depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __osx >=10.13 license: Apache-2.0 license_family: Apache purls: [] - size: 233724 - timestamp: 1725670503118 + size: 226610 + timestamp: 1728706789415 - kind: conda name: 
aws-c-common - version: 0.9.28 + version: 0.9.29 build: hb9d3cd8_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda - sha256: febe894ae2f5bfc4d65c51bd058433e9061d994ff06b30d5eca18919639c5083 - md5: 1b53af320b24547ce0fb8196d2604542 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.29-hb9d3cd8_0.conda + sha256: b3b50f518e9afad383f6851bf7000cf8b343d7d3ca71558df233ee7b4bfc2919 + md5: acc51b49fd7467c8dfe4343001b812b4 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 236451 - timestamp: 1725670076853 + size: 237231 + timestamp: 1728706773555 - kind: conda name: aws-c-compression version: 0.2.19 - build: h756ea98_1 - build_number: 1 + build: h2bff981_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda - sha256: 0e7fd40a9f8aa235e78202af75a421a7f6ea589e30c5cbe1787ceaccf36a3ce9 - md5: 5e08c385a1b8a79b52012b74653bbb99 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h2bff981_2.conda + sha256: 908a416ff3f62b09bed436e1f77418f54115412244734d3960b11d586dd0749f + md5: 87a059d4d2ab89409496416119dd7152 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 19116 - timestamp: 1725829968483 + size: 18983 + timestamp: 1728750679322 - kind: conda name: aws-c-compression version: 0.2.19 - build: h8128ea2_1 - build_number: 1 + build: h40772b6_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h8128ea2_1.conda - sha256: f60f8bec5eddd1974367aac03a646996374d8f290bb4463dfbf1e7620462e7be - md5: 43be0637437461d48ff524c04459ee46 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-compression-0.2.19-h40772b6_2.conda + sha256: 
e4e7c8ebc1761f263927af2eedddbba0b5698e05a073100b953b0d0d33cc969b + md5: 083875412346dcc097c6b1ca4aaa4abf depends: - __osx >=10.13 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 17936 - timestamp: 1725829971987 + size: 18004 + timestamp: 1728750733091 - kind: conda name: aws-c-compression version: 0.2.19 - build: hf1fc857_1 - build_number: 1 + build: h891f644_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-hf1fc857_1.conda - sha256: 0e5913b72e730644a9ea8b5ed8d8fbc32d288d202882a9ec089b64a18612dc31 - md5: 289e8943be0dce6b1abf60652bc1492e + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-compression-0.2.19-h891f644_2.conda + sha256: ad9b85a2b60d6da4ae729d039d26ffb6aaaaeb1227546e18c020634b6155ca54 + md5: 690cae4ce0e03c0ac7ccab5ae0c7f4c2 depends: - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 22447 - timestamp: 1725830398597 + size: 22761 + timestamp: 1728750939878 - kind: conda name: aws-c-event-stream version: 0.4.3 - build: h29ce20c_2 - build_number: 2 + build: h19b0707_4 + build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h29ce20c_2.conda - sha256: ac5e04779811b29fc47e06d6bb9ea6436511216ea2871ad6917c3894174c5fa3 - md5: d533baa7e43239591d5cc0233849c475 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h19b0707_4.conda + sha256: 951f96eb45a439a36935dc2099e10c902518ec511a287c1685ca65a88a9accaa + md5: df38f56123f30d61de24474e600e7d41 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 - libgcc >=13 - 
libstdcxx >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 54116 - timestamp: 1726327201288 + size: 53821 + timestamp: 1728792746255 - kind: conda name: aws-c-event-stream version: 0.4.3 - build: hcd1ed9e_2 - build_number: 2 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-hcd1ed9e_2.conda - sha256: f717b5d9baa8206d7069f80cd2f5a2cd99747b3f1e3aed4bea7e392846979d9b - md5: 73bde3fabf8b8f6f2be9cc6f152d0606 - depends: - - __osx >=10.13 - - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 - - aws-checksums >=0.1.20,<0.1.21.0a0 - - libcxx >=17 - license: Apache-2.0 - license_family: Apache - purls: [] - size: 46699 - timestamp: 1726327279325 -- kind: conda - name: aws-c-event-stream - version: 0.4.3 - build: hd0ca3c1_2 - build_number: 2 + build: h9d4aeba_4 + build_number: 4 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-hd0ca3c1_2.conda - sha256: be7815f98f210acc1e6cbac1d9a0cb05d6f91fe53c2dd62cab585c4da66359e3 - md5: 93704218ce07e4d961299e170ed430b6 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-event-stream-0.4.3-h9d4aeba_4.conda + sha256: 2e91d6b19c907cac47b1fbac185657857bcf696c9e392033d9bdd8822a0662af + md5: 87b4ba6f710c80e6255c65dfea69f56d depends: - - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -3162,197 +3127,223 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 54331 - timestamp: 1726327493766 + size: 54367 + timestamp: 1728793025069 - kind: conda - name: aws-c-http - version: 0.8.10 - build: h2f86973_0 + name: aws-c-event-stream + version: 0.4.3 + build: ha4d96b0_4 + build_number: 4 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-h2f86973_0.conda - sha256: 
dae6a6be9a6fc1c2d6318c62d2b89e20fe75d8df5a4b7766b95be788cfb9516c - md5: 4160f0e92d2f25532ee52b625556e488 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-event-stream-0.4.3-ha4d96b0_4.conda + sha256: fc11bc48cf035c669e5cc2f050e2037c2c08d047e480e3dd31a714ff3d96c536 + md5: 5a626a594c1099b0b05a7851391bfe75 depends: - __osx >=10.13 - - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-compression >=0.2.19,<0.2.20.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 + - aws-checksums >=0.1.20,<0.1.21.0a0 + - libcxx >=17 license: Apache-2.0 license_family: Apache purls: [] - size: 164396 - timestamp: 1726469195066 + size: 46741 + timestamp: 1728792866753 - kind: conda name: aws-c-http version: 0.8.10 - build: h5e77a74_0 + build: h14a7884_2 + build_number: 2 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h5e77a74_0.conda - sha256: 887af55b895502ef7611ad0dd5e19990385b05348262d6c5a8a22330490b14e7 - md5: 947cd303444ea92a382a10e43bad1a3f + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.10-h14a7884_2.conda + sha256: 0561267292739a451d7d389f100330fefafb97859962f617cd5268c96400e3aa + md5: 6147c6b6cef67adcb85516f5cf775be7 depends: - __glibc >=2.17,<3.0.a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-compression >=0.2.19,<0.2.20.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 197233 - timestamp: 1726469181157 + size: 197562 + timestamp: 1728792795954 - kind: conda name: aws-c-http version: 0.8.10 - build: heca9ddf_0 + build: h4d3606f_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-heca9ddf_0.conda - sha256: 2d474df981675d8d4bef7b22485c76cbf05df6b65bb2ea3f07363ebc0f6ed34c - md5: 
efd3dc45770f91dcd4f3a82f50cbea53 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-http-0.8.10-h4d3606f_2.conda + sha256: d8d9258a5a57d3f4b10759b848008ab0f0295c5a8d4fcf48b8caabf4400c4c3b + md5: bfbf20092c8fc6496c552fe32887ad1e depends: - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-compression >=0.2.19,<0.2.20.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 182262 - timestamp: 1726469702580 + size: 181401 + timestamp: 1728793364673 - kind: conda - name: aws-c-io - version: 0.14.18 - build: h3831a8d_11 - build_number: 11 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.18-h3831a8d_11.conda - sha256: 1ce3c60df424a6e32b69697ce6c126e36cffc065f9dc31a82da5486166120f07 - md5: 297f12ac4c10e0597e760c3751bf4b52 + name: aws-c-http + version: 0.8.10 + build: he927b17_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-http-0.8.10-he927b17_2.conda + sha256: 8312d2bb237fa04fb167dd02bb650d5621874e0ff05c016aea226b4b1a84b796 + md5: 45edfe7067d22c11427a35a8de669039 depends: + - __osx >=10.13 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - aws-c-common >=0.9.29,<0.9.30.0a0 + - aws-c-compression >=0.2.19,<0.2.20.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 161125 - timestamp: 1728231479155 + size: 163779 + timestamp: 1728792806800 - kind: conda name: aws-c-io - version: 0.14.18 - build: h4e6ae90_11 - build_number: 11 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h4e6ae90_11.conda - sha256: a03b3dfdf221592e17fdf4d4e96ecebfab7052e69bc22adc5eb68b2fc54200de - md5: 
21fd3e17dab1b20a0acdbc8b406ee7af + version: 0.14.19 + build: h5c5bb51_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-io-0.14.19-h5c5bb51_1.conda + sha256: 5cd48ced1408dcb36bb621ec5f2a4038925b2d9bec9c655bd03173811808437c + md5: 2ad4977c474af198170c439c6905ccdb depends: - - __glibc >=2.17,<3.0.a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 - - libgcc >=13 - - s2n >=1.5.4,<1.5.5.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 159269 - timestamp: 1728231235486 + size: 159347 + timestamp: 1728771151643 - kind: conda name: aws-c-io - version: 0.14.18 - build: hf9a0f1c_11 - build_number: 11 + version: 0.14.19 + build: h99e8e40_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.18-hf9a0f1c_11.conda - sha256: a9683e05898d0622443b2bf0b38945a2599ebbd90b64d2640a21bd514d00ada5 - md5: ccab53307c565057ad2c954effab34ab + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-io-0.14.19-h99e8e40_1.conda + sha256: b3805841ed7004860f6dc364c0af8073d41405e10b08cc6b96a0d24cf1dd0c76 + md5: f7d331cf9a5237add8eef5f47d2181d5 depends: - __osx >=10.13 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 138446 - timestamp: 1728231289821 + size: 138184 + timestamp: 1728770990521 - kind: conda - name: aws-c-mqtt - version: 0.10.7 - build: h02abb05_0 + name: aws-c-io + version: 0.14.19 + build: hc9e6898_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-h02abb05_0.conda - sha256: dfc23a658ee659b0bf86545bd76d14710bfb6fb1457824b85e49a0e99b0aaea9 - md5: b442b985952afe5820da96bb976ee006 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.19-hc9e6898_1.conda + 
sha256: 35f9719fb9d5ddf4955a432d73d910261d60754d20b58de2be2701a2e68a9cfb + md5: ec84785f7ae14ed43156a54aec33bb14 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 - - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-cal >=0.7.4,<0.7.5.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - libgcc >=13 + - s2n >=1.5.5,<1.5.6.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 195051 - timestamp: 1728339436377 + size: 158806 + timestamp: 1728770974012 - kind: conda name: aws-c-mqtt version: 0.10.7 - build: h9d7d61c_0 + build: h3b26a6a_2 + build_number: 2 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h9d7d61c_0.conda - sha256: e17efadc9db5b4397f1a2ce8714bf60a2c5269764dd95000c2a2c97f28e663eb - md5: cfa8c785abedd8caaf6a58703d215c44 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-mqtt-0.10.7-h3b26a6a_2.conda + sha256: 164533013fecc8d894f1a8406caf3a274c72766c10035d2cbd6b83b5fe6e09c4 + md5: 68565e8bf009f60b93c102965a99dbc0 depends: - __osx >=10.13 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 163926 - timestamp: 1728339489746 + size: 163992 + timestamp: 1728797661495 - kind: conda name: aws-c-mqtt version: 0.10.7 - build: hf27581b_0 + build: h98e454b_2 + build_number: 2 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-hf27581b_0.conda - sha256: 7ba0d682b81f8fdd5b66edf251026a5bfed3b2c51b6d53dbdb5d93985fe451c1 - md5: 7c1bb68151f9b81e1369bbcaa05a574e + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-mqtt-0.10.7-h98e454b_2.conda + sha256: 19a23680b17d9667f1e6fa67ef0f7222a7e2c65868f73fbbf309e98194ef7511 + md5: b6f023f52cb88bab9cf5ace656cb9604 depends: - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http 
>=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 186759 - timestamp: 1728340065107 + size: 187037 + timestamp: 1728798083507 +- kind: conda + name: aws-c-mqtt + version: 0.10.7 + build: hb8d5873_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.7-hb8d5873_2.conda + sha256: b30a3d8ba9352760c30f696b65486fe0e1d3cfe771f114b008a70ad440eb00c0 + md5: 8dc25ca24c1a50b8295a848c384ede99 + depends: + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 + - aws-c-http >=0.8.10,<0.8.11.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 + - libgcc >=13 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 195951 + timestamp: 1728797647791 - kind: conda name: aws-c-s3 - version: 0.6.6 - build: h56e9fbd_0 + version: 0.6.7 + build: h23cec54_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.6-h56e9fbd_0.conda - sha256: 15c45a36c07cdbfbb5ec393e6b6d10d15a87df7d2dd87db9fa594b13a3359987 - md5: 0b301304eebf6697381350eb096bd1a5 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-s3-0.6.7-h23cec54_0.conda + sha256: 39836725221a3a389be362810634200cb086528218df6038e4ca5afb7a1bb8b5 + md5: 3073d7220086be9d8031dddd428f3e27 depends: - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -3360,247 +3351,251 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 108140 - timestamp: 1726722849474 + size: 108565 + timestamp: 1728967348047 - kind: conda name: aws-c-s3 - version: 0.6.6 - build: h834ce55_0 + version: 0.6.7 + build: 
h666547d_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.6-h834ce55_0.conda - sha256: b5e921f2bca092eec7355e296292f84a3db6e37802be61c56bf865edc4246532 - md5: dbf33f245023697941d4ff6b996d2b2c + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.7-h666547d_0.conda + sha256: fe006f58bd9349ab7cd4cd864dd4e83409e89764b10d9d7eb7ec148e2f964465 + md5: 7f59dcbbd4eab14ca9256f20b43849eb depends: - __glibc >=2.17,<3.0.a0 - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 - libgcc >=13 - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 112595 - timestamp: 1726722460857 + size: 113457 + timestamp: 1728967087200 - kind: conda name: aws-c-s3 - version: 0.6.6 - build: hd01826e_0 + version: 0.6.7 + build: hdec1378_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.6-hd01826e_0.conda - sha256: 24794cfcaf9d6da28192dda88449dad1e112408a96c51e5a0df6a1925c4e8a57 - md5: 7fa9af757e8376f738eb314518ec282b + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-s3-0.6.7-hdec1378_0.conda + sha256: bc83cd8471e444a20dfd03625aaf30f5906540db0fecc32b829ab622c6ebc9b4 + md5: 8ab8f088f6a64382b8ecfff7262782e2 depends: - __osx >=10.13 - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 97414 - timestamp: 1726722505830 + size: 97683 + timestamp: 1728967098949 - kind: conda name: aws-c-sdkutils version: 0.1.19 - build: h756ea98_3 - build_number: 3 + build: h2bff981_4 + 
build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda - sha256: 4e6f79f3fee5ebb4fb12b6258d91315ed0f7a2ac16c75611cffdbaa0d54badb2 - md5: bfe6623096906d2502c78ccdbfc3bc7a + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h2bff981_4.conda + sha256: ef65ca9eb9f32ada6fb1b47759374e7ef4f85db002f2265ebc8fd61718284cbc + md5: 5a8afd37e2dfe464d68e63d1c38b08c5 depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 55799 - timestamp: 1725836731034 + size: 55957 + timestamp: 1728755888042 - kind: conda name: aws-c-sdkutils version: 0.1.19 - build: h8128ea2_3 - build_number: 3 + build: h40772b6_4 + build_number: 4 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h8128ea2_3.conda - sha256: 50912641279d00a6ce12b1d72e74ea5d30078e91a0557a48a9e9fe285c2f6b2c - md5: 8d93b3603363214303737f74b6efb5da + url: https://conda.anaconda.org/conda-forge/osx-64/aws-c-sdkutils-0.1.19-h40772b6_4.conda + sha256: a74a1bdc601ab112d849b414908aa01451f8d0de27c0b233155fea07d69e0551 + md5: 8d7e97d7c9829f54acbf018a88f2f20e depends: - __osx >=10.13 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 50686 - timestamp: 1725836776385 + size: 50711 + timestamp: 1728755916895 - kind: conda name: aws-c-sdkutils version: 0.1.19 - build: hf1fc857_3 - build_number: 3 + build: h891f644_4 + build_number: 4 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-hf1fc857_3.conda - sha256: 5e42bba0f1ffd1a1cc5b80f5abae03c7118809f4545c688e56c2bb5a0ee3740e - md5: b00e5b1b3985d9dfadde29e8b00f85e4 + url: https://conda.anaconda.org/conda-forge/win-64/aws-c-sdkutils-0.1.19-h891f644_4.conda + sha256: 
487280bbee81934a2fe1d75873d47512b849bcf0c3db0152141a833fa3323fec + md5: 4a689865568b5cb5e1aecbf759a563c0 depends: - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 55242 - timestamp: 1725837225397 + size: 55175 + timestamp: 1728756284819 - kind: conda name: aws-checksums version: 0.1.20 - build: h756ea98_0 + build: h2bff981_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h756ea98_0.conda - sha256: 4b4543b0ca5528b6ca421f97394d7781a1d7d78b17ac3990d0fbe6a49159a407 - md5: ff7dbb319545f4bd1e5e0f8555cf9e7f + url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.20-h2bff981_1.conda + sha256: e1793f2e52fe04ef3a6b2069abda7960d061c6f7af1f0d5f616d43e7a7c40e3c + md5: 8b424cf6b3cfc5cffe98bf4d16c032fb depends: - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - libgcc >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 72784 - timestamp: 1726281973900 + size: 72862 + timestamp: 1728750748391 - kind: conda name: aws-checksums version: 0.1.20 - build: h8128ea2_0 + build: h40772b6_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h8128ea2_0.conda - sha256: 1953103f0ead6ffbcb73ddbcec617ce9195010fea838f63b5e8e93b8d4bf4bcb - md5: 1fbab35b839a3d822f1b39680298fd9f + url: https://conda.anaconda.org/conda-forge/osx-64/aws-checksums-0.1.20-h40772b6_1.conda + sha256: a32f81d6349580b38a917f1643677650b931fc67fab9c9b123e47c3de4844d21 + md5: a40738142e8dfc05b328ff240ad56c02 depends: - __osx >=10.13 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 70818 - timestamp: 1726281979944 + size: 70907 + timestamp: 1728750777703 - kind: conda name: aws-checksums 
version: 0.1.20 - build: hf1fc857_0 + build: h891f644_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-hf1fc857_0.conda - sha256: 446710cc7d12beddfe11bfd50a5d2a8f2418b66fb3a0a92a1a9031e041b101e9 - md5: 1b66a8719c94d85fa6658d8f46600f21 + url: https://conda.anaconda.org/conda-forge/win-64/aws-checksums-0.1.20-h891f644_1.conda + sha256: 08476aa93e31656038abc01855a8f9caf0f26da192239f7404920c8d24512913 + md5: d0269d6abbe51548175d5a25764a7aa2 depends: - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 75478 - timestamp: 1726282558694 + size: 75884 + timestamp: 1728751179874 - kind: conda name: aws-crt-cpp version: 0.28.3 - build: h26f7782_6 - build_number: 6 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h26f7782_6.conda - sha256: d17c6a79a872d622656a8d6b54573d230030ef808fc5da2d3bbe5e60518d1c3c - md5: d59a961de1c3039c98f23e8f7532b31a + build: h8479783_8 + build_number: 8 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-h8479783_8.conda + sha256: a89072de71422745b76ae37ca059742848ffadfa59ca3de101d8aee0b7fb125f + md5: ce2b845c4855e0a618f5cf7ee03716c8 depends: + - __osx >=10.13 - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-c-mqtt >=0.10.7,<0.10.8.0a0 - - aws-c-s3 >=0.6.6,<0.6.7.0a0 + - aws-c-s3 >=0.6.7,<0.6.8.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libcxx >=17 license: Apache-2.0 license_family: Apache purls: [] - size: 255207 - timestamp: 1728390584945 + size: 
294732 + timestamp: 1729181456647 - kind: conda name: aws-crt-cpp version: 0.28.3 - build: h3e6eb3e_6 - build_number: 6 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-h3e6eb3e_6.conda - sha256: bf85c7ad2875771d29db7f65a346b1937fc6b4c7f44283b159e6f00c2dac7a2c - md5: a12a25457b517277e15228889e568daa + build: h97eb5cb_8 + build_number: 8 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/aws-crt-cpp-0.28.3-h97eb5cb_8.conda + sha256: b93611bfe92bcebd9f423be30806a7ca8332da884bba6b9774dd7963d0cfa4fa + md5: f065becc389d50d4a5a5b2c76965d819 depends: - - __glibc >=2.17,<3.0.a0 - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-c-mqtt >=0.10.7,<0.10.8.0a0 - - aws-c-s3 >=0.6.6,<0.6.7.0a0 + - aws-c-s3 >=0.6.7,<0.6.8.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - libgcc >=13 - - libstdcxx >=13 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Apache-2.0 license_family: Apache purls: [] - size: 349909 - timestamp: 1728389760881 + size: 255230 + timestamp: 1729181718175 - kind: conda name: aws-crt-cpp version: 0.28.3 - build: hef75ebe_6 - build_number: 6 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-crt-cpp-0.28.3-hef75ebe_6.conda - sha256: 540af6454373d89636012578c1d35cffb6fcf997ebb242773f975c13cea3d0f8 - md5: 831c884adc08e9cb33671f5ae024da65 + build: hbe26082_8 + build_number: 8 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.3-hbe26082_8.conda + sha256: a9c23a685929b24fcd032daae36b61c4862912abf0a0a8735aeef53418c5bce6 + md5: 80d5fac04be0e6c2774f57eb7529f145 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - aws-c-auth >=0.7.31,<0.7.32.0a0 - aws-c-cal >=0.7.4,<0.7.5.0a0 - 
- aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-c-http >=0.8.10,<0.8.11.0a0 - - aws-c-io >=0.14.18,<0.14.19.0a0 + - aws-c-io >=0.14.19,<0.14.20.0a0 - aws-c-mqtt >=0.10.7,<0.10.8.0a0 - - aws-c-s3 >=0.6.6,<0.6.7.0a0 + - aws-c-s3 >=0.6.7,<0.6.8.0a0 - aws-c-sdkutils >=0.1.19,<0.1.20.0a0 - - libcxx >=17 + - libgcc >=13 + - libstdcxx >=13 license: Apache-2.0 license_family: Apache purls: [] - size: 294267 - timestamp: 1728389843579 + size: 349632 + timestamp: 1729181229435 - kind: conda name: aws-sdk-cpp version: 1.11.407 - build: h25dd3c2_0 + build: h1b410ea_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h25dd3c2_0.conda - sha256: 46337ac3bb24a6f8addeef0b642013989cf7efa2de5c1e12e2d7f62c5137549c - md5: b2d39f93aa57382367d6cacd55ec4f32 + url: https://conda.anaconda.org/conda-forge/win-64/aws-sdk-cpp-1.11.407-h1b410ea_1.conda + sha256: b8a258eee64ac40a77c1c53a430c1903f253a6a6afa3e1ad1408d27f46cd0cb6 + md5: 68cb9340f0279ed0265dd3bb8c4b2c00 depends: - - aws-c-common >=0.9.28,<0.9.29.0a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 @@ -3611,250 +3606,258 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 2805201 - timestamp: 1726639233904 + size: 2803496 + timestamp: 1729236310982 - kind: conda name: aws-sdk-cpp version: 1.11.407 - build: h2e282c2_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-h2e282c2_0.conda - sha256: c6a88e7882325c56e1f25252a5529bd24650207ab44cb31e976daa9b5d8b1914 - md5: 16c5992e1c374f160128336cdf64e171 + build: h25d6d5c_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h25d6d5c_1.conda + sha256: f05d43f3204887cec9a9853a9217f06562b28161950b5485aed1f8afe42aad17 + md5: 
0f2bd0128d59a45c9fd56151eab0b37e depends: - - __osx >=10.13 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - - libcurl >=8.10.0,<9.0a0 - - libcxx >=17 + - libcurl >=8.10.1,<9.0a0 + - libgcc >=13 + - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 2787167 - timestamp: 1726638546148 + size: 2931742 + timestamp: 1729235000691 - kind: conda name: aws-sdk-cpp version: 1.11.407 - build: h9f1560d_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.407-h9f1560d_0.conda - sha256: bc250a3879b88c13e91fc03abdca3867c5a0dd7767da5f364d4460f74d64f286 - md5: 5c3dd49b04db05e0e884de48ff77ae24 + build: hf1b494f_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/aws-sdk-cpp-1.11.407-hf1b494f_1.conda + sha256: d9e5d7de941918d2147117d63092a4ae2be2504c128ac486c523ae8f2a4be210 + md5: 36b0704483644427e688edc93952eb7d depends: - - __glibc >=2.17,<3.0.a0 - - aws-c-common >=0.9.28,<0.9.29.0a0 + - __osx >=10.13 + - aws-c-common >=0.9.29,<0.9.30.0a0 - aws-c-event-stream >=0.4.3,<0.4.4.0a0 - aws-checksums >=0.1.20,<0.1.21.0a0 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - - libcurl >=8.10.0,<9.0a0 - - libgcc >=13 - - libstdcxx >=13 + - libcurl >=8.10.1,<9.0a0 + - libcxx >=17 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.2,<4.0a0 license: Apache-2.0 license_family: Apache purls: [] - size: 2935773 - timestamp: 1726638167995 + size: 2775922 + timestamp: 1729235363763 - kind: conda name: azure-core-cpp - version: 1.13.0 - build: h935415a_0 + version: 1.14.0 + build: h5cfcd09_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda - sha256: b7e0a22295db2e1955f89c69cefc32810309b3af66df986d9fb75d89f98a80f7 - md5: 
debd1677c2fea41eb2233a260f48a298 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda + sha256: fe07debdb089a3db17f40a7f20d283d75284bb4fc269ef727b8ba6fc93f7cb5a + md5: 0a8838771cc2e985cd295e01ae83baf1 depends: - __glibc >=2.17,<3.0.a0 - - libcurl >=8.8.0,<9.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - openssl >=3.3.1,<4.0a0 + - libcurl >=8.10.1,<9.0a0 + - libgcc >=13 + - libstdcxx >=13 + - openssl >=3.3.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 338134 - timestamp: 1720853194547 + size: 345117 + timestamp: 1728053909574 - kind: conda name: azure-core-cpp - version: 1.13.0 - build: haf5610f_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.13.0-haf5610f_0.conda - sha256: e3d828f79368057258140e46404892b0ed8983797c05c04eac3bd24dea71da41 - md5: 14ed34c3091f89784d926cc7cf4b773b + version: 1.14.0 + build: h9a36307_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.14.0-h9a36307_0.conda + sha256: c7694fc16b9aebeb6ee5e4f80019b477a181d961a3e4d9b6a66b77777eb754fe + md5: 1082a031824b12a2be731d600cfa5ccb depends: - - libcurl >=8.8.0,<9.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __osx >=10.13 + - libcurl >=8.10.1,<9.0a0 + - libcxx >=17 + - openssl >=3.3.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 487099 - timestamp: 1720853456727 + size: 303166 + timestamp: 1728053999891 - kind: conda name: azure-core-cpp - version: 1.13.0 - build: hf8dbe3c_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-core-cpp-1.13.0-hf8dbe3c_0.conda - sha256: 1976259d75ef68431039522d7105777ac0621ef8a0f8a31140fa8926b1fe1280 - md5: 514d3cbb527a88930e816370e34caa19 + version: 1.14.0 + build: haf5610f_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/azure-core-cpp-1.14.0-haf5610f_0.conda + sha256: cfa87475993d22a1119f617ab56ae11289809414d0533db942856ed9e27aca11 + md5: 
29fefbcaa92a797c0597437a21754c84 depends: - - __osx >=10.13 - - libcurl >=8.8.0,<9.0a0 - - libcxx >=16 - - openssl >=3.3.1,<4.0a0 + - libcurl >=8.10.1,<9.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 296234 - timestamp: 1720853326346 + size: 490425 + timestamp: 1728054410522 - kind: conda name: azure-identity-cpp - version: 1.9.0 - build: h148e6f0_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.9.0-h148e6f0_0.conda - sha256: b049cf07cf19cf16110fc7ba1b7140251977fc5f2145119c68e121b8cebe34cc - md5: efc97c94c6cfeb84ca386d9e33c63c85 + version: 1.10.0 + build: h113e628_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda + sha256: 286b31616c191486626cb49e9ceb5920d29394b9e913c23adb7eb637629ba4de + md5: 73f73f60854f325a55f1d31459f2ab73 depends: - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __glibc >=2.17,<3.0.a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - libgcc >=13 + - libstdcxx >=13 + - openssl >=3.3.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 392359 - timestamp: 1722992699567 + size: 232351 + timestamp: 1728486729511 - kind: conda name: azure-identity-cpp - version: 1.9.0 - build: h60298e3_0 + version: 1.10.0 + build: ha4e2ba9_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.9.0-h60298e3_0.conda - sha256: 1b238982de2532a62a360857e517bbc19037a485fe5b6edaf3dd1e2f7cfe2c40 - md5: 5f61464a9a26242e99615778afc7502e + url: https://conda.anaconda.org/conda-forge/osx-64/azure-identity-cpp-1.10.0-ha4e2ba9_0.conda + sha256: b9899b9698a6c7353fc5078c449105aae58635d217befbc8ca9d5a527198019b + md5: ad56b6a4b8931d37a2cf5bc724a46f01 depends: - __osx >=10.13 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - libcxx >=16 - - openssl >=3.3.1,<4.0a0 + - azure-core-cpp 
>=1.14.0,<1.14.1.0a0 + - libcxx >=17 + - openssl >=3.3.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 157809 - timestamp: 1722992283584 + size: 175344 + timestamp: 1728487066445 - kind: conda name: azure-identity-cpp - version: 1.9.0 - build: hd126650_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.9.0-hd126650_0.conda - sha256: 0a8e8ce57bc5b22b318e9ab229cb12d29f96245fba7d9618c1b612862b9c84e5 - md5: 470dd41567c68110f0ca2268cd434d8a + version: 1.10.0 + build: hd6deed7_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/azure-identity-cpp-1.10.0-hd6deed7_0.conda + sha256: 1e33a3ac53db30dd1d6a49dfdfd3e7f67b96faeee0d54025665de18bd565439f + md5: 4bb53530510eb2d983c46abce9bee9e2 depends: - - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - openssl >=3.3.1,<4.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 213265 - timestamp: 1722992189687 + size: 401221 + timestamp: 1728487334019 - kind: conda name: azure-storage-blobs-cpp version: 12.13.0 - build: h1d30c4a_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h1d30c4a_0.conda - sha256: 376b1daf96f066c0bca09b61faee182355b21802bcaa6ad7f0494c65888a0b5b - md5: 378dd9c6347c112d301421a00d6446a9 + build: h3241184_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-h3241184_1.conda + sha256: 7daae9b73aa785ace6e6b4a701a5c02655ba9724326354cc6171f22f2fbcbd90 + md5: 2e050178dd479163794041ede5c476c7 depends: - - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libgcc >=13 - - libstdcxx >=13 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime 
>=14.29.30139 license: MIT license_family: MIT purls: [] - size: 550550 - timestamp: 1727385969150 + size: 981573 + timestamp: 1728578740233 - kind: conda name: azure-storage-blobs-cpp version: 12.13.0 - build: h2259716_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h2259716_0.conda - sha256: 3ac5ace301e04a45ca2b27cc662d218b3dec07d68267af8fd36d617fcd59f833 - md5: 1146aa05e75cf261f615a34ef724d9d5 + build: h3cf044e_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda + sha256: 2606260e5379eed255bcdc6adc39b93fb31477337bcd911c121fc43cd29bf394 + md5: 7eb66060455c7a47d9dcdbfa9f46579b depends: - - __osx >=10.13 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 + - __glibc >=2.17,<3.0.a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libcxx >=17 + - libgcc >=13 + - libstdcxx >=13 license: MIT license_family: MIT purls: [] - size: 444010 - timestamp: 1727386034095 + size: 549342 + timestamp: 1728578123088 - kind: conda name: azure-storage-blobs-cpp version: 12.13.0 - build: ha77ad07_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-blobs-cpp-12.13.0-ha77ad07_0.conda - sha256: 17391511d8229d147632f3da0d55ceabcf2e09842b4e3ac110f7290729cec35a - md5: a0a698cd3f459efd08da14cf532e7c0a + build: h3d2f5f1_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-blobs-cpp-12.13.0-h3d2f5f1_1.conda + sha256: 31984e52450230d04ca98d5232dbe256e5ef6e32b15d46124135c6e64790010d + md5: 3df4fb5d6d0e7b3fb28e071aff23787e depends: - - azure-core-cpp >=1.13.0,<1.13.1.0a0 + - __osx >=10.13 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libcxx >=17 license: MIT license_family: MIT purls: [] - size: 969856 - timestamp: 
1727386384989 + size: 445040 + timestamp: 1728578180436 - kind: conda name: azure-storage-common-cpp version: 12.8.0 - build: h148e6f0_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-h148e6f0_0.conda - sha256: 1446059dbeed5940466837afd0f028f34aaca93d4a02b62b1a49fc48016fd8aa - md5: b3f5dabec8fb63d8f3f72a7d95bcc583 + build: h1ccc5ac_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-h1ccc5ac_1.conda + sha256: 51fb67d2991d105b8f7b97b4810cd63bac4dc421a4a9c83c15a98ca520a42e1e + md5: 5b3e79eb148d6e30d6c697788bad9960 depends: - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __osx >=10.13 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - libcxx >=17 + - libxml2 >=2.12.7,<3.0a0 + - openssl >=3.3.2,<4.0a0 license: MIT license_family: MIT purls: [] - size: 241069 - timestamp: 1727272027753 + size: 126229 + timestamp: 1728563580392 - kind: conda name: azure-storage-common-cpp version: 12.8.0 - build: ha3822c6_0 + build: h736e048_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-ha3822c6_0.conda - sha256: 3bb614675fcae708eeb344fe1a21040007ab4efb1de2bd0f1c6171e4c23eb092 - md5: d4a0f1f34459947781a6975beb505fa9 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda + sha256: 273475f002b091b66ce7366da04bf164c3732c03f8692ab2ee2d23335b6a82ba + md5: 13de36be8de3ae3f05ba127631599213 depends: - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 - libgcc >=13 - libstdcxx >=13 - libxml2 >=2.12.7,<3.0a0 @@ -3862,66 +3865,68 @@ packages: license: MIT license_family: MIT purls: [] - size: 149394 - timestamp: 1727271583048 + size: 149312 + timestamp: 1728563338704 - kind: conda name: azure-storage-common-cpp version: 12.8.0 - build: 
hdeff353_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-common-cpp-12.8.0-hdeff353_0.conda - sha256: 12751ee03a7620cfa453184782d303e7763fc498d1710ff74e15e6ed74244164 - md5: 4f5d3bf98b8c5eef9c65d3d8017ce47a + build: hd6deed7_1 + build_number: 1 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/azure-storage-common-cpp-12.8.0-hd6deed7_1.conda + sha256: 433c3f4691682d5ff06453e02a2d631b8dd91fcf3e507094e0718dcb44a8b5c1 + md5: dae51ef526eb277bc350875cd05fbf08 depends: - - __osx >=10.13 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - libcxx >=17 - - libxml2 >=2.12.7,<3.0a0 - - openssl >=3.3.2,<4.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: [] - size: 126050 - timestamp: 1727271812046 + size: 241811 + timestamp: 1728563797953 - kind: conda name: azure-storage-files-datalake-cpp version: 12.12.0 - build: h0f25b8a_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-h0f25b8a_0.conda - sha256: 697ce350677465c2532c988ad6a4754c471ffa587c1b6e8833c2e73c0a697300 - md5: 7477b3a3f09927fbc47cd69e3f3430ea + build: h86941f0_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h86941f0_1.conda + sha256: 12d95251a8793ea2e78f494e69353a930e9ea06bbaaaa4ccb6e5b3e35ee0744f + md5: 60452336e7f61f6fdaaff69264ee112e depends: - - __glibc >=2.17,<3.0.a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 + - __osx >=10.13 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libgcc >=13 - - libstdcxx >=13 + - libcxx >=17 license: MIT license_family: MIT purls: [] - size: 286941 - timestamp: 1727554386014 + size: 200991 + timestamp: 1728729588371 - kind: conda name: azure-storage-files-datalake-cpp version: 12.12.0 - 
build: h2123174_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/azure-storage-files-datalake-cpp-12.12.0-h2123174_0.conda - sha256: ca5a432e5a68757cc6bf64c25d3bb45398e7a68b9ed1c346f266dbcc8c9d3af7 - md5: 8efd606971a03ceacf25feac63968834 + build: ha633028_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda + sha256: 5371e4f3f920933bb89b926a85a67f24388227419abd6e99f6086481e5e8d5f2 + md5: 7c1980f89dd41b097549782121a73490 depends: - - __osx >=10.13 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 + - __glibc >=2.17,<3.0.a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - - libcxx >=17 + - libgcc >=13 + - libstdcxx >=13 license: MIT license_family: MIT purls: [] - size: 200677 - timestamp: 1727554487274 + size: 287366 + timestamp: 1728729530295 - kind: conda name: babel version: 2.14.0 @@ -3976,6 +3981,72 @@ packages: - pkg:pypi/beautifulsoup4?source=hash-mapping size: 118200 timestamp: 1705564819537 +- kind: conda + name: black + version: 24.10.0 + build: py312h2e8e312_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/black-24.10.0-py312h2e8e312_0.conda + sha256: 64df9c7e1454386b5ec763e82a40062b47e80700b1bc556878b7aa7b659c3ae1 + md5: 6e943a224409da3599a8ec52944e3c15 + depends: + - click >=8.0.0 + - mypy_extensions >=0.4.3 + - packaging >=22.0 + - pathspec >=0.9 + - platformdirs >=2 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: MIT + license_family: MIT + purls: + - pkg:pypi/black?source=hash-mapping + size: 417913 + timestamp: 1728504045145 +- kind: conda + name: black + version: 24.10.0 + build: py312h7900ff3_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/black-24.10.0-py312h7900ff3_0.conda + sha256: 2b4344d18328b3e8fd9b5356f4ee15556779766db8cb21ecf2ff818809773df6 + md5: 
2daba153b913b1b901cf61440ad5e019 + depends: + - click >=8.0.0 + - mypy_extensions >=0.4.3 + - packaging >=22.0 + - pathspec >=0.9 + - platformdirs >=2 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: MIT + license_family: MIT + purls: + - pkg:pypi/black?source=hash-mapping + size: 390571 + timestamp: 1728503839694 +- kind: conda + name: black + version: 24.10.0 + build: py312hb401068_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/black-24.10.0-py312hb401068_0.conda + sha256: a1397d32f6d40ff19107bab8c1570f3934ad91a601d1d973b129eabe08b943e6 + md5: e832f4c2afb84e85718008b600944bc0 + depends: + - click >=8.0.0 + - mypy_extensions >=0.4.3 + - packaging >=22.0 + - pathspec >=0.9 + - platformdirs >=2 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: MIT + license_family: MIT + purls: + - pkg:pypi/black?source=hash-mapping + size: 393514 + timestamp: 1728503944080 - kind: conda name: bleach version: 6.1.0 @@ -4060,13 +4131,13 @@ packages: timestamp: 1719266029046 - kind: conda name: bokeh - version: 3.5.2 + version: 3.6.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda - sha256: 8af284264eb1cb9c08586ac8c212dcafc929ef1de3db9d0d7f8ca75190a30f4b - md5: 38d785787ec83d0431b3855328395113 + url: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda + sha256: 6658b1ac45ba1c6a486b0b1eb22235a42435e5b014ec3ca696e41c6fc43761a3 + md5: 6728ca650187933a007b89f00ece4279 depends: - contourpy >=1.2 - jinja2 >=2.9 @@ -4082,8 +4153,8 @@ packages: license_family: BSD purls: - pkg:pypi/bokeh?source=hash-mapping - size: 4798991 - timestamp: 1724417639170 + size: 4519248 + timestamp: 1728932643855 - kind: pypi name: bokeh-helpers version: 0.1.0 @@ -4350,12 +4421,12 @@ packages: timestamp: 1720974491916 - kind: conda name: c-ares - version: 1.34.1 + version: 1.34.2 build: h2466b09_0 subdir: win-64 - url: 
https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.1-h2466b09_0.conda - sha256: 8fb691558b774f09e279bc10a401e39507f634d5762993b2fb4e5c49b772f933 - md5: beed99d68e1513f7af98a3ea8c8bd9ab + url: https://conda.anaconda.org/conda-forge/win-64/c-ares-1.34.2-h2466b09_0.conda + sha256: 5b7a6bb814bc2df92c0c08d7f2f63ae5bc4d71efdc6131130bdc230a8db936fc + md5: 6fcf481938188279f28757a4814a4b73 depends: - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -4363,39 +4434,39 @@ packages: license: MIT license_family: MIT purls: [] - size: 193256 - timestamp: 1728535073722 + size: 192859 + timestamp: 1729006899124 - kind: conda name: c-ares - version: 1.34.1 - build: h44e7173_0 + version: 1.34.2 + build: h32b1619_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.1-h44e7173_0.conda - sha256: 004fea4112ce5a862271265e908a762843390e1870dacfd1a9a38c9aad902e9c - md5: 611618b0b3949f947da65c96ff9c51fb + url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.2-h32b1619_0.conda + sha256: 972d0403c92c9cd1d1c60e34d80991258125ee880cf5a9289ae83a443d8970cd + md5: 724edfea6dde646c1faf2ce1423e0faa depends: - __osx >=10.13 license: MIT license_family: MIT purls: [] - size: 182470 - timestamp: 1728534760896 + size: 182342 + timestamp: 1729006698430 - kind: conda name: c-ares - version: 1.34.1 + version: 1.34.2 build: heb4867d_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.1-heb4867d_0.conda - sha256: d7e50b2ce3ef01dfbb11e8f50411b4be91b92c94cd10a83c843f1f2e53832e04 - md5: db792eada25e970c46642f624b029fd7 + url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda + sha256: c2a515e623ac3e17a56027c06098fbd5ab47afefefbd386b4c21289f2ec55139 + md5: 2b780c0338fc0ffa678ac82c54af51fd depends: - __glibc >=2.28,<3.0.a0 - libgcc >=13 license: MIT license_family: MIT purls: [] - size: 204891 - timestamp: 1728534705052 + size: 205797 + timestamp: 1729006575652 - kind: conda name: ca-certificates version: 2024.8.30 @@ 
-4665,60 +4736,63 @@ packages: - kind: conda name: cfitsio version: 4.4.1 - build: ha105788_0 + build: h47b6969_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-ha105788_0.conda - sha256: 6b54b24abd3122d33d80a59a901cd51b26b6d47fbb9f84c2bf1f87606e9899c6 - md5: 99445be39aaea44a05046c479f8c6dc9 + url: https://conda.anaconda.org/conda-forge/osx-64/cfitsio-4.4.1-h47b6969_1.conda + sha256: c33947db36f41231925258461252cb0c8241d54e1c7fdc36439c1267f3b107b7 + md5: 652baccec76775b65c684528a6fc1ba8 depends: - __osx >=10.13 - bzip2 >=1.0.8,<2.0a0 - - libcurl >=8.8.0,<9.0a0 + - libcurl >=8.10.1,<9.0a0 - libgfortran 5.* - - libgfortran5 >=12.3.0 - libgfortran5 >=13.2.0 - libzlib >=1.3.1,<2.0a0 license: LicenseRef-fitsio purls: [] - size: 849075 - timestamp: 1718906514228 + size: 848868 + timestamp: 1729002110318 - kind: conda name: cfitsio version: 4.4.1 - build: hc2ea260_0 + build: h5bc515d_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-hc2ea260_0.conda - sha256: 97249ec67f115c05a2a452e62f6aed2e3f3a244ba1f33b0e9395a05f9d7f6fee - md5: b3263858e6a924d05dc2e9ce335593ba + url: https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.4.1-h5bc515d_1.conda + sha256: db17ccd75e9ac1c55c61db02749d220d0e22cb7d2b4ed1a6dd74ebbc2622aae4 + md5: fe95969b81f7a88c0b25e8576626cacb depends: - - libcurl >=8.8.0,<9.0a0 + - libcurl >=8.10.1,<9.0a0 - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: LicenseRef-fitsio purls: [] - size: 601046 - timestamp: 1718906922426 + size: 601712 + timestamp: 1729002632977 - kind: conda name: cfitsio version: 4.4.1 - build: hf8ad068_0 + build: ha728647_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda - sha256: 74ed4d8b327fa775d9c87e476a7221b74fb913aadcef207622596a99683c8faf - md5: 1b7a01fd02d11efe0eb5a676842a7b7d + url: 
https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-ha728647_1.conda + sha256: 2410f48d7e64e4a6379bb6806507468bacfd275139ca5531a51ae55d6c7a0a0c + md5: b0cffae5b5354ae441cacc46d31ea436 depends: + - __glibc >=2.17,<3.0.a0 - bzip2 >=1.0.8,<2.0a0 - - libcurl >=8.8.0,<9.0a0 - - libgcc-ng >=12 - - libgfortran-ng - - libgfortran5 >=12.3.0 + - libcurl >=8.10.1,<9.0a0 + - libgcc >=13 + - libgfortran + - libgfortran5 >=13.3.0 - libzlib >=1.3.1,<2.0a0 license: LicenseRef-fitsio purls: [] - size: 924198 - timestamp: 1718906379286 + size: 907215 + timestamp: 1729002082744 - kind: conda name: charset-normalizer version: 3.4.0 @@ -4812,21 +4886,22 @@ packages: timestamp: 1633637895378 - kind: conda name: cloudpickle - version: 3.0.0 - build: pyhd8ed1ab_0 + version: 3.1.0 + build: pyhd8ed1ab_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda - sha256: 0dfbc1ffa72e7a0882f486c9b1e4e9cccb68cf5c576fe53a89d076c9f1d43754 - md5: 753d29fe41bb881e4b9c004f0abf973f + url: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda + sha256: 5a33d0d3ef33121c546eaf78b3dac2141fc4d30bbaeb3959bbc66fcd5e99ced6 + md5: c88ca2bb7099167912e3b26463fff079 depends: - python >=3.8 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/cloudpickle?source=hash-mapping - size: 24746 - timestamp: 1697464875382 + size: 25952 + timestamp: 1729059365471 - kind: conda name: colorama version: 0.4.6 @@ -4954,30 +5029,34 @@ packages: timestamp: 1727294254434 - kind: conda name: coverage - version: 7.6.2 - build: py312h3d0f464_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.2-py312h3d0f464_0.conda - sha256: 892c9a4dc830d4ab1b4a4abd10c079338a0a80d81adf0940e45ad2762db8315a - md5: 8e87296799e87fa5de5e82f4473cf764 + version: 7.6.3 + build: py312h178313f_1 + build_number: 1 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.3-py312h178313f_1.conda + sha256: 7a18f2c772b05c60c3e50e81c9951dc8eb3e598c162a9075584e82469afff979 + md5: 2621104ac246594948615017c1254c66 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tomli license: Apache-2.0 + license_family: APACHE purls: - pkg:pypi/coverage?source=hash-mapping - size: 362094 - timestamp: 1728527536188 + size: 363035 + timestamp: 1729007130762 - kind: conda name: coverage - version: 7.6.2 - build: py312h4389bb4_0 + version: 7.6.3 + build: py312h31fea79_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.2-py312h4389bb4_0.conda - sha256: 442f865d0c18243a7b8ffeb8eece05929a5345b8464185008d651683dab97a40 - md5: b0e1bc38ada7fdf2dd7343bb9f294cd0 + url: https://conda.anaconda.org/conda-forge/win-64/coverage-7.6.3-py312h31fea79_1.conda + sha256: 525d85c76d2410c014727ddcfa156e0961209ccf09b148cf7d1a4f8eba1800b5 + md5: 6d680daeeba7981e06a5982d6f79a852 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -4986,29 +5065,31 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: Apache-2.0 + license_family: APACHE purls: - pkg:pypi/coverage?source=hash-mapping - size: 387590 - timestamp: 1728528255329 + size: 387904 + timestamp: 1729007342447 - kind: conda name: coverage - version: 7.6.2 - build: py312h66e93f0_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.2-py312h66e93f0_0.conda - sha256: a48fd12d3a2b021998fff3588cbd811386c64528111d5d284a73dfc9a552495b - md5: fa85b4b778217fbeb88425985f001497 + version: 7.6.3 + build: py312hbe3f5e4_1 + build_number: 1 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/coverage-7.6.3-py312hbe3f5e4_1.conda + sha256: 1f2abd17eff2a185844f407d2bc217bb9537e844d7bca7ccbe0de05ac88e024c + md5: 98530658b00ac6172199ba4a03d5169a depends: - - __glibc >=2.17,<3.0.a0 - 
- libgcc >=13 + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tomli license: Apache-2.0 + license_family: APACHE purls: - pkg:pypi/coverage?source=hash-mapping - size: 363534 - timestamp: 1728527478116 + size: 361178 + timestamp: 1729007075014 - kind: conda name: cpython version: 3.12.7 @@ -5188,42 +5269,41 @@ packages: timestamp: 1683598364427 - kind: conda name: dask - version: 2024.9.1 + version: 2024.10.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.9.1-pyhd8ed1ab_0.conda - sha256: 0007c996c91891df3a3fe3d6b8265f0b602396989d4ce87cd78d88fd94dfac48 - md5: f4a81fe958755b2db083566a6a2da06f + url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda + sha256: 1e4f516d536ecb7cb11c1ab4fa60e9862ed4d8ff84441f769d88e413510893a5 + md5: 719832923b1d98803d07b2ca38eb3baa depends: - bokeh >=3.1.0 - cytoolz >=0.11.0 - - dask-core >=2024.9.1,<2024.9.2.0a0 + - dask-core >=2024.10.0,<2024.10.1.0a0 - dask-expr >=1.1,<1.2 - - distributed >=2024.9.1,<2024.9.2.0a0 + - distributed >=2024.10.0,<2024.10.1.0a0 - jinja2 >=2.10.3 - lz4 >=4.3.2 - numpy >=1.24 - pandas >=2.0 - - pyarrow >=7.0 - - pyarrow-hotfix + - pyarrow >=14.0.1 - python >=3.10 constrains: - openssl !=1.1.1e license: BSD-3-Clause license_family: BSD purls: [] - size: 7417 - timestamp: 1727494165691 + size: 7416 + timestamp: 1729259321643 - kind: conda name: dask-core - version: 2024.9.1 + version: 2024.10.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.9.1-pyhd8ed1ab_0.conda - sha256: 08d01f45f711fcb093e04a491825f9dd0f4129e6432587f5f84a3cbd10a4030d - md5: 0bcf33226f8dbe7e2d6acefb99a7323f + url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda + sha256: 8523eb7b861cae9a87e781ad8fe1d2121910eb522c3a16e632f71674bfff3b7b + md5: 7823092a3cf14e98a52d2a2875c47c80 depends: - click >=8.1 - cloudpickle 
>=3.0.0 @@ -5238,28 +5318,28 @@ packages: license_family: BSD purls: - pkg:pypi/dask?source=hash-mapping - size: 896858 - timestamp: 1727485758122 + size: 899752 + timestamp: 1729174208253 - kind: conda name: dask-expr - version: 1.1.15 + version: 1.1.16 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.15-pyhd8ed1ab_0.conda - sha256: 7ff9c79fc369de2b9da433108cd02ed5a99045493b366f6ce7acaa2dd097a6b0 - md5: 865cd3fdeffd42a9682f3bb992e828e8 + url: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda + sha256: b279d5cde7e049cb859f21938aee9ffec2c25460f651431c809d19a34b3ffa34 + md5: 81de1c44ab7f6cadab4a59b6d76dfa87 depends: - - dask-core 2024.9.1 + - dask-core 2024.10.0 - pandas >=2 - - pyarrow + - pyarrow >=14.0.1 - python >=3.10 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/dask-expr?source=hash-mapping - size: 185216 - timestamp: 1727490062118 + size: 184799 + timestamp: 1729201054149 - kind: conda name: dbus version: 1.13.6 @@ -5280,12 +5360,12 @@ packages: timestamp: 1640112124844 - kind: conda name: debugpy - version: 1.8.6 + version: 1.8.7 build: py312h275cf98_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.6-py312h275cf98_0.conda - sha256: 048929ec428f8f288a60d273a1da897735249f63a4026bc6a8fe3bc4f245a797 - md5: c02f5ecd381057f141fb33c5aa319610 + url: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.7-py312h275cf98_0.conda + sha256: 09e3d6ce099093c95fea5e491aa3027f4a10a290ccea4a4b1f596adb682bdd31 + md5: d44035420f6e1aa2f1983e0860c14ce6 depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -5296,16 +5376,16 @@ packages: license_family: MIT purls: - pkg:pypi/debugpy?source=hash-mapping - size: 3576437 - timestamp: 1727241342607 + size: 3502851 + timestamp: 1728594816374 - kind: conda name: debugpy - version: 1.8.6 + version: 1.8.7 build: py312h2ec8cdc_0 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.6-py312h2ec8cdc_0.conda - sha256: 3c75b1358046c8b4d9ccd6df509f07859de6554a781a5eb46c90f295c499afab - md5: f5ca5a690ff9100b7a05d26f77d88156 + url: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.7-py312h2ec8cdc_0.conda + sha256: e03c74ba23342f580f4cc822e46623561206da4857fd47c84c482f36a121095d + md5: 13e4b568d8f94e2a38f9acd192149516 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -5316,16 +5396,16 @@ packages: license_family: MIT purls: - pkg:pypi/debugpy?source=hash-mapping - size: 2642177 - timestamp: 1727240850721 + size: 2640727 + timestamp: 1728594265044 - kind: conda name: debugpy - version: 1.8.6 + version: 1.8.7 build: py312h5861a67_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.6-py312h5861a67_0.conda - sha256: 71ee52f2b8676767ad781c2038873b06300b851729ca2fc3c4b8a5e211f229b6 - md5: 5dcf9133d68237c59931ab728e6ccadc + url: https://conda.anaconda.org/conda-forge/osx-64/debugpy-1.8.7-py312h5861a67_0.conda + sha256: ecb7d1747b9a29617a9ab6ce2b242b0a5fb6f106b7802adc1ecd4b9014f3eda5 + md5: ce833a8cb2ca51926ece65e1831694d8 depends: - __osx >=10.13 - libcxx >=17 @@ -5335,8 +5415,8 @@ packages: license_family: MIT purls: - pkg:pypi/debugpy?source=hash-mapping - size: 2526304 - timestamp: 1727240828899 + size: 2508736 + timestamp: 1728594477705 - kind: conda name: decorator version: 5.1.1 @@ -5480,24 +5560,25 @@ packages: depends: - python 2.7|>=3.6 license: Apache-2.0 + license_family: APACHE purls: - pkg:pypi/distlib?source=hash-mapping size: 276214 timestamp: 1728557312342 - kind: conda name: distributed - version: 2024.9.1 + version: 2024.10.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.9.1-pyhd8ed1ab_0.conda - sha256: d4d934d3b5c73d8ccadd7c1b37cfba99b096403f33d4cf0085108daeed46e3c9 - md5: a9f1c72da2654a8ae07a33ed3975d328 + url: 
https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda + sha256: 2994121c3e2f57e42fac50ccde8fbd08dbc3e1ecd90f6a90d05e77d4cfbe922c + md5: b3b498f7bcc9a2543ad72a3501f3d87b depends: - click >=8.0 - cloudpickle >=3.0.0 - cytoolz >=0.11.2 - - dask-core >=2024.9.1,<2024.9.2.0a0 + - dask-core >=2024.10.0,<2024.10.1.0a0 - jinja2 >=2.10.3 - locket >=1.0.0 - msgpack-python >=1.0.2 @@ -5517,8 +5598,8 @@ packages: license_family: BSD purls: - pkg:pypi/distributed?source=hash-mapping - size: 801109 - timestamp: 1727490025224 + size: 801083 + timestamp: 1729197454118 - kind: conda name: double-conversion version: 3.3.0 @@ -6691,22 +6772,21 @@ packages: timestamp: 1711634622644 - kind: conda name: griffe - version: 1.3.2 + version: 1.5.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/griffe-1.3.2-pyhd8ed1ab_0.conda - sha256: 7e9420f5bd3e1c538dd2e4a6ba76e197c8e173593e575d1fb4ce5761fafdeea8 - md5: db8382698d1f5307b33fbc17bafc1e4e + url: https://conda.anaconda.org/conda-forge/noarch/griffe-1.5.1-pyhd8ed1ab_0.conda + sha256: 591bf3247a0872b76e2cf57cbdb71762913568390f5a745fe0f3f779a16459a9 + md5: 87db2aa0738c4acc5f565388d519fb25 depends: - - astunparse >=1.6 - colorama >=0.4 - - python >=3.8 + - python >=3.9 license: ISC purls: - pkg:pypi/griffe?source=hash-mapping - size: 97688 - timestamp: 1728324149441 + size: 97620 + timestamp: 1729348988898 - kind: conda name: h11 version: 0.14.0 @@ -7307,7 +7387,7 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/ipython?source=compressed-mapping + - pkg:pypi/ipython?source=hash-mapping size: 600094 timestamp: 1727944801855 - kind: conda @@ -8310,114 +8390,116 @@ packages: - kind: conda name: libarrow version: 17.0.0 - build: h297d146_20_cpu - build_number: 20 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-h297d146_20_cpu.conda - sha256: 
68e302653d238de390bb7ead3e96f3bee18a9dcde7c827e5f9827c48bf7cadd1 - md5: c0b6bc35ca65b358e76d4457eb480100 + build: h3475b9b_22_cpu + build_number: 22 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h3475b9b_22_cpu.conda + sha256: fcdffb7f28f418dceaf904f48eacade5baf8b8157763e9bc18c63d770df4530c + md5: 3caf581f511584b19f33c54e8fa3ebac depends: + - __osx >=10.13 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - azure-identity-cpp >=1.10.0,<1.10.1.0a0 + - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 + - azure-storage-files-datalake-cpp >=12.12.0,<12.12.1.0a0 - bzip2 >=1.0.8,<2.0a0 + - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - - libcrc32c >=1.1.2,<1.2.0a0 - - libcurl >=8.10.1,<9.0a0 - - libgoogle-cloud >=2.29.0,<2.30.0a0 - - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - - libre2-11 >=2023.9.1,<2024.0a0 + - libcxx >=18 + - libgoogle-cloud >=2.30.0,<2.31.0a0 + - libgoogle-cloud-storage >=2.30.0,<2.31.0a0 + - libre2-11 >=2023.9.1 - libutf8proc >=2.8.0,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - orc >=2.0.2,<2.0.3.0a0 - re2 - snappy >=1.2.1,<1.3.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 constrains: - parquet-cpp <0.0a0 - - apache-arrow-proc =*=cpu - arrow-cpp <0.0a0 + - apache-arrow-proc =*=cpu license: Apache-2.0 + license_family: APACHE purls: [] - size: 5064357 - timestamp: 1728535632310 + size: 5908335 + timestamp: 1728880977173 - kind: conda name: libarrow version: 17.0.0 - build: h364f349_20_cpu - build_number: 20 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h364f349_20_cpu.conda - sha256: 58e01b7214111281f897a859c09bc02b14f6bbc9fb97c93aa873ef492a8e55cb - md5: 5d3c3264432d2dd25f8aed1e97102b94 + build: ha019072_22_cpu + build_number: 22 + 
subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-17.0.0-ha019072_22_cpu.conda + sha256: ca16fb7db4b4684b61fba1de9f829e29da3bf1926e813ca88478bd95ed9b0d59 + md5: 8d0383773b6bde77e20f65f69ecd4eb8 depends: - - __glibc >=2.17,<3.0.a0 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.9.0,<1.9.1.0a0 - - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - - azure-storage-files-datalake-cpp >=12.12.0,<12.12.1.0a0 - bzip2 >=1.0.8,<2.0a0 - - gflags >=2.2.2,<2.3.0a0 - - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - - libgcc >=13 - - libgoogle-cloud >=2.29.0,<2.30.0a0 - - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - - libre2-11 >=2023.9.1,<2024.0a0 - - libstdcxx >=13 + - libcrc32c >=1.1.2,<1.2.0a0 + - libcurl >=8.10.1,<9.0a0 + - libgoogle-cloud >=2.30.0,<2.31.0a0 + - libgoogle-cloud-storage >=2.30.0,<2.31.0a0 + - libre2-11 >=2023.9.1 - libutf8proc >=2.8.0,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - orc >=2.0.2,<2.0.3.0a0 - re2 - snappy >=1.2.1,<1.3.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 constrains: - - apache-arrow-proc =*=cpu - - parquet-cpp <0.0a0 - arrow-cpp <0.0a0 + - parquet-cpp <0.0a0 + - apache-arrow-proc =*=cpu license: Apache-2.0 + license_family: APACHE purls: [] - size: 8528014 - timestamp: 1728534747835 + size: 5092813 + timestamp: 1728882186127 - kind: conda name: libarrow version: 17.0.0 - build: h74c0fbd_20_cpu - build_number: 20 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-17.0.0-h74c0fbd_20_cpu.conda - sha256: 216533708248423936ac11dc5b289088986ed02372f238a3cfc0f1e891174891 - md5: b10c3ff291e32a7be99948500241dd22 + build: ha07344c_22_cpu + build_number: 22 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-ha07344c_22_cpu.conda + sha256: 8bc36917ae9814769453cac466c27d9894cec4b280fdb7c0e8d229e3a14a2806 + md5: 041e55887514cd414ec7df03d68210fb depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - azure-identity-cpp >=1.10.0,<1.10.1.0a0 - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - azure-storage-files-datalake-cpp >=12.12.0,<12.12.1.0a0 - bzip2 >=1.0.8,<2.0a0 + - gflags >=2.2.2,<2.3.0a0 - glog >=0.7.1,<0.8.0a0 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - libbrotlidec >=1.1.0,<1.2.0a0 - libbrotlienc >=1.1.0,<1.2.0a0 - - libcxx >=18 - - libgoogle-cloud >=2.29.0,<2.30.0a0 - - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - - libre2-11 >=2023.9.1,<2024.0a0 + - libgcc >=13 + - libgoogle-cloud >=2.30.0,<2.31.0a0 + - libgoogle-cloud-storage >=2.30.0,<2.31.0a0 + - libre2-11 >=2023.9.1 + - libstdcxx >=13 - libutf8proc >=2.8.0,<3.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 @@ -8427,192 +8509,202 @@ packages: - zstd >=1.5.6,<1.6.0a0 constrains: - arrow-cpp <0.0a0 - - parquet-cpp <0.0a0 - apache-arrow-proc =*=cpu + - parquet-cpp <0.0a0 license: Apache-2.0 + license_family: APACHE purls: [] - size: 5943895 - timestamp: 1728533748437 + size: 8527600 + timestamp: 1728881566531 - kind: conda name: libarrow-acero version: 17.0.0 - build: h240833e_20_cpu - build_number: 20 + build: h240833e_22_cpu + build_number: 22 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_20_cpu.conda - sha256: 8334e78ef6d83390a240390bbdf1295b3c5d54cdbc1e3079f364e40e8947f58c - md5: 1a60d732f131749c172cca38a7321fb2 + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-acero-17.0.0-h240833e_22_cpu.conda + sha256: 
ecda8f171c395239e0e8e89fd490274bd73302dd2868e15c4c2337440953de02 + md5: 1ba76b42451a767e28bea7c9c89d83c6 depends: - __osx >=10.13 - - libarrow 17.0.0 h74c0fbd_20_cpu + - libarrow 17.0.0 h3475b9b_22_cpu - libcxx >=18 license: Apache-2.0 + license_family: APACHE purls: [] - size: 519923 - timestamp: 1728533863684 + size: 519546 + timestamp: 1728881135017 - kind: conda name: libarrow-acero version: 17.0.0 - build: h5888daf_20_cpu - build_number: 20 + build: h5888daf_22_cpu + build_number: 22 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_20_cpu.conda - sha256: fd79ee1d7ba766ad4661f7df43a5c2f08c01bb04d41498fc2982b71e4dac7d30 - md5: 011c07ec81754028bceebaa0ac839e12 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_22_cpu.conda + sha256: 4e0b87f03ae5e74aabde0ec97d20be328b478dfc6e46225ff29b5656ef2c35be + md5: 1675812f0edd6e6edede105de6218ff8 depends: - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h364f349_20_cpu + - libarrow 17.0.0 ha07344c_22_cpu - libgcc >=13 - libstdcxx >=13 license: Apache-2.0 + license_family: APACHE purls: [] - size: 608612 - timestamp: 1728534791375 + size: 608752 + timestamp: 1728881613405 - kind: conda name: libarrow-acero version: 17.0.0 - build: hac47afa_20_cpu - build_number: 20 + build: hac47afa_22_cpu + build_number: 22 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_20_cpu.conda - sha256: 9c167a0a48444a5ad763f9f0d6232882278d441ce5ac9c2aedc11af6fb10afa3 - md5: 58aaa12782e5fce95d327bed789cd251 + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-acero-17.0.0-hac47afa_22_cpu.conda + sha256: 29263d59264b60506b15610042ccaa3434cf110ca290f83304c984ec04372595 + md5: 178eef47d2bc31f32d28bdbfcc9c7ee1 depends: - - libarrow 17.0.0 h297d146_20_cpu + - libarrow 17.0.0 ha019072_22_cpu - ucrt >=10.0.20348.0 - vc >=14.3,<15 - vc14_runtime >=14.40.33810 license: Apache-2.0 + license_family: APACHE purls: [] - 
size: 444400 - timestamp: 1728535698248 + size: 443768 + timestamp: 1728882244505 - kind: conda name: libarrow-dataset version: 17.0.0 - build: h240833e_20_cpu - build_number: 20 + build: h240833e_22_cpu + build_number: 22 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_20_cpu.conda - sha256: 372a191062dbac9684b2fd7141a974d388768292f1afc007b78d060e48c613a2 - md5: 33329715c8411290070ee0a25ec512ff + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-dataset-17.0.0-h240833e_22_cpu.conda + sha256: 6a1ce2acac5f411be0b62c6f53cd06cfd9d59b44e6894c3179c9a2225bf69e12 + md5: 5df7d21b6d376877667d5cd20544a2a2 depends: - __osx >=10.13 - - libarrow 17.0.0 h74c0fbd_20_cpu - - libarrow-acero 17.0.0 h240833e_20_cpu + - libarrow 17.0.0 h3475b9b_22_cpu + - libarrow-acero 17.0.0 h240833e_22_cpu - libcxx >=18 - - libparquet 17.0.0 hc957f30_20_cpu + - libparquet 17.0.0 hc957f30_22_cpu license: Apache-2.0 + license_family: APACHE purls: [] - size: 511557 - timestamp: 1728535062865 + size: 511732 + timestamp: 1728882532415 - kind: conda name: libarrow-dataset version: 17.0.0 - build: h5888daf_20_cpu - build_number: 20 + build: h5888daf_22_cpu + build_number: 22 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_20_cpu.conda - sha256: 3c940c82d8410e6a7aac17c8ba48f415a6b16aad43c9de9ff3a64ac69cbbcaa3 - md5: 0f71b9865661fd45b5d07ab6db734c33 + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_22_cpu.conda + sha256: 5543555aceb2c339b605c924dd62f6e0b00d1b15d7e5e2ad0fc06cc858816c54 + md5: fa5b60076d370bd032f5a97024c86405 depends: - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h364f349_20_cpu - - libarrow-acero 17.0.0 h5888daf_20_cpu + - libarrow 17.0.0 ha07344c_22_cpu + - libarrow-acero 17.0.0 h5888daf_22_cpu - libgcc >=13 - - libparquet 17.0.0 h6bd9018_20_cpu + - libparquet 17.0.0 h6bd9018_22_cpu - libstdcxx >=13 license: Apache-2.0 + 
license_family: APACHE purls: [] - size: 583751 - timestamp: 1728534883341 + size: 582593 + timestamp: 1728881712619 - kind: conda name: libarrow-dataset version: 17.0.0 - build: hac47afa_20_cpu - build_number: 20 + build: hac47afa_22_cpu + build_number: 22 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_20_cpu.conda - sha256: 49355faa6f9e4fb5ea6701af629c2ea1f0d61cbba8f04bd04ee6a28851871988 - md5: f164dad54f67d20dd189bd6a7067966d + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-dataset-17.0.0-hac47afa_22_cpu.conda + sha256: f0ca38a688f9c860dedf11ef61c81687231dc10555931f817c9563ab43ec862a + md5: 00e5beafff9832f4744c28ca658378c8 depends: - - libarrow 17.0.0 h297d146_20_cpu - - libarrow-acero 17.0.0 hac47afa_20_cpu - - libparquet 17.0.0 h59f2d37_20_cpu + - libarrow 17.0.0 ha019072_22_cpu + - libarrow-acero 17.0.0 hac47afa_22_cpu + - libparquet 17.0.0 h59f2d37_22_cpu - ucrt >=10.0.20348.0 - vc >=14.3,<15 - vc14_runtime >=14.40.33810 license: Apache-2.0 + license_family: APACHE purls: [] - size: 432396 - timestamp: 1728535944425 + size: 433205 + timestamp: 1728882464383 - kind: conda name: libarrow-substrait version: 17.0.0 - build: ha9530af_20_cpu - build_number: 20 + build: ha9530af_22_cpu + build_number: 22 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_20_cpu.conda - sha256: 3e111be012bae997a2e65f5282b33cc2a6f9655cf0831a3db95261933c3010b4 - md5: 6b79fa44f56144ce6682d64ed1d64b7c + url: https://conda.anaconda.org/conda-forge/win-64/libarrow-substrait-17.0.0-ha9530af_22_cpu.conda + sha256: 65cf15b45147b2a5ec1b6fb9134b717210addcdacb26cd60c340e07350e763a0 + md5: 3e98449318b102aecd59996fc480f17b depends: - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - - libarrow 17.0.0 h297d146_20_cpu - - libarrow-acero 17.0.0 hac47afa_20_cpu - - libarrow-dataset 17.0.0 hac47afa_20_cpu + - libarrow 17.0.0 ha019072_22_cpu + - libarrow-acero 17.0.0 
hac47afa_22_cpu + - libarrow-dataset 17.0.0 hac47afa_22_cpu - libprotobuf >=5.27.5,<5.27.6.0a0 - ucrt >=10.0.20348.0 - vc >=14.3,<15 - vc14_runtime >=14.40.33810 license: Apache-2.0 + license_family: APACHE purls: [] - size: 365133 - timestamp: 1728536047685 + size: 365023 + timestamp: 1728882559542 - kind: conda name: libarrow-substrait version: 17.0.0 - build: hdefb866_20_cpu - build_number: 20 + build: hdefb866_22_cpu + build_number: 22 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_20_cpu.conda - sha256: 8e5370a0d28e1c69cf7d1169d786a30344d41f134b795e9d56665ff76c728153 - md5: e40c150c8d31081099f4e592028ff82d + url: https://conda.anaconda.org/conda-forge/osx-64/libarrow-substrait-17.0.0-hdefb866_22_cpu.conda + sha256: 61f3cb65345e9510666d80bcbe2d5e83d358f420a8008c8946f14b43d34bcb9f + md5: dfdcf8685ec0c7b38dcba2018798c33d depends: - __osx >=10.13 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - - libarrow 17.0.0 h74c0fbd_20_cpu - - libarrow-acero 17.0.0 h240833e_20_cpu - - libarrow-dataset 17.0.0 h240833e_20_cpu + - libarrow 17.0.0 h3475b9b_22_cpu + - libarrow-acero 17.0.0 h240833e_22_cpu + - libarrow-dataset 17.0.0 h240833e_22_cpu - libcxx >=18 - libprotobuf >=5.27.5,<5.27.6.0a0 license: Apache-2.0 + license_family: APACHE purls: [] - size: 459555 - timestamp: 1728535286463 + size: 459487 + timestamp: 1728882757491 - kind: conda name: libarrow-substrait version: 17.0.0 - build: he882d9a_20_cpu - build_number: 20 + build: he882d9a_22_cpu + build_number: 22 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_20_cpu.conda - sha256: 1fdd139fd70aaf309d0c05a7fc7712747634a61fa840319c05f27b7445c7c8bc - md5: faffcf59a3de9cc7bf0d8b0962ecf6eb + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-he882d9a_22_cpu.conda + sha256: 251884b03ee81a35621fa6d20dceae105d01d8019a48230c9f5b4a4fcd447c7c + md5: 
764d9eaef6da3e22f3d988e93ec7896b depends: - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - - libarrow 17.0.0 h364f349_20_cpu - - libarrow-acero 17.0.0 h5888daf_20_cpu - - libarrow-dataset 17.0.0 h5888daf_20_cpu + - libarrow 17.0.0 ha07344c_22_cpu + - libarrow-acero 17.0.0 h5888daf_22_cpu + - libarrow-dataset 17.0.0 h5888daf_22_cpu - libgcc >=13 - libprotobuf >=5.27.5,<5.27.6.0a0 - libstdcxx >=13 license: Apache-2.0 + license_family: APACHE purls: [] - size: 515897 - timestamp: 1728534924050 + size: 515581 + timestamp: 1728881756281 - kind: conda name: libblas version: 3.9.0 @@ -8899,48 +8991,51 @@ packages: timestamp: 1726669449378 - kind: conda name: libclang-cpp19.1 - version: 19.1.1 - build: default_hb5137d0_0 + version: 19.1.2 + build: default_hb5137d0_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.1-default_hb5137d0_0.conda - sha256: a2fb20bdcbebf94d654a4e770ddc910b0e1fcefe2b5acbd5dec04cb19129df2c - md5: a5feadc4a296e2d31ab5a642498ff85e + url: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.2-default_hb5137d0_1.conda + sha256: 2638abec6f79942a9176b30b2ea70bd47967e873d3d120822cbab38ab4895c14 + md5: 7e574c7499bc41f92537634a23fed79a depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - - libllvm19 >=19.1.1,<19.2.0a0 + - libllvm19 >=19.1.2,<19.2.0a0 - libstdcxx >=13 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 20542477 - timestamp: 1728456712882 + size: 20533631 + timestamp: 1729290507675 - kind: conda name: libclang13 - version: 19.1.1 - build: default_h9c6a7e4_0 + version: 19.1.2 + build: default_h9c6a7e4_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.1-default_h9c6a7e4_0.conda - sha256: fa782c361fd77574cdd3e99762e82b8f02bc8b7da9098e8e5d5b925a153840fe - md5: 2e8992c584c2525a5b8ec7485cbe360c + url: 
https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.2-default_h9c6a7e4_1.conda + sha256: 8a38fb764bf65cc18f03006db6aeb345d390102182db2e46fd3f452a1b2dcfcc + md5: cb5c5ff12b37aded00d9aaa7b9a86a78 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - - libllvm19 >=19.1.1,<19.2.0a0 + - libllvm19 >=19.1.2,<19.2.0a0 - libstdcxx >=13 license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 11820007 - timestamp: 1728456910135 + size: 11819644 + timestamp: 1729290739883 - kind: conda name: libclang13 - version: 19.1.1 - build: default_ha5278ca_0 + version: 19.1.2 + build: default_ha5278ca_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.1-default_ha5278ca_0.conda - sha256: 107be8d6156bc3395e2576dbbfddb7a6b72c9efbbe083a6d3f86611110f6e4cf - md5: 72f980e3852ad8f490485868bd391851 + url: https://conda.anaconda.org/conda-forge/win-64/libclang13-19.1.2-default_ha5278ca_1.conda + sha256: 4349ba29e57a7487e3abe243037dc53185a523afe4d7a889c227e05ab3dfd5b9 + md5: c38f43ef7461c7fac0d5010153ae8d42 depends: - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 @@ -8950,8 +9045,8 @@ packages: license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 26750479 - timestamp: 1728476423850 + size: 26750034 + timestamp: 1729293730509 - kind: conda name: libcrc32c version: 1.1.2 @@ -9083,19 +9178,19 @@ packages: timestamp: 1726659794676 - kind: conda name: libcxx - version: 19.1.1 + version: 19.1.2 build: hf95d169_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.1-hf95d169_0.conda - sha256: 390ee50a14fe5b6ac87b64eeb0130c7a79853641ae9a8926687556c76a645889 - md5: 2b09d0f92cae6df4b1670adcaca9c38c + url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.2-hf95d169_0.conda + sha256: 04593566411ce8dc6400777c772c10a153ebf1082b104ee52a98562a24a50880 + md5: 8bdfb741a2cdbd0a4e7b7dc30fbc0d6c depends: - __osx >=10.13 license: Apache-2.0 WITH LLVM-exception 
license_family: Apache purls: [] - size: 528308 - timestamp: 1727863581528 + size: 526600 + timestamp: 1729038055775 - kind: conda name: libdeflate version: '1.22' @@ -9395,61 +9490,61 @@ packages: timestamp: 1636489106777 - kind: conda name: libgcc - version: 14.1.0 + version: 14.2.0 build: h1383e82_1 build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.1.0-h1383e82_1.conda - sha256: 727d3659035d7b3c6c07c2cf90e7886ae81fd03229abf3ec9f836d9aeca11d2a - md5: 5464b6bb50d593b8f529d1fbcd58f3b2 + url: https://conda.anaconda.org/conda-forge/win-64/libgcc-14.2.0-h1383e82_1.conda + sha256: ef840e797714440bb10b69446d815966fff41fdac79f79c4e19c475d81cd375d + md5: 75fdd34824997a0f9950a703b15d8ac5 depends: - _openmp_mutex >=4.5 - libwinpthread >=12.0.0.r4.gg4f2fc60ca constrains: + - libgcc-ng ==14.2.0=*_1 + - libgomp 14.2.0 h1383e82_1 - msys2-conda-epoch <0.0a0 - - libgomp 14.1.0 h1383e82_1 - - libgcc-ng ==14.1.0=*_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 665353 - timestamp: 1724805164393 + size: 666386 + timestamp: 1729089506769 - kind: conda name: libgcc - version: 14.1.0 + version: 14.2.0 build: h77fa898_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda - sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3 - md5: 002ef4463dd1e2b44a94a4ace468f5d2 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda + sha256: 53eb8a79365e58849e7b1a068d31f4f9e718dc938d6f2c03e960345739a03569 + md5: 3cb76c3f10d3bc7f1105b2fc9db984df depends: - _libgcc_mutex 0.1 conda_forge - _openmp_mutex >=4.5 constrains: - - libgomp 14.1.0 h77fa898_1 - - libgcc-ng ==14.1.0=*_1 + - libgomp 14.2.0 h77fa898_1 + - libgcc-ng ==14.2.0=*_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 846380 - timestamp: 1724801836552 + size: 848745 + timestamp: 1729027721139 - kind: conda name: 
libgcc-ng - version: 14.1.0 + version: 14.2.0 build: h69a702a_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda - sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7 - md5: 1efc0ad219877a73ef977af7dbb51f17 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda + sha256: 3a76969c80e9af8b6e7a55090088bc41da4cffcde9e2c71b17f44d37b7cb87f7 + md5: e39480b9ca41323497b05492a63bc35b depends: - - libgcc 14.1.0 h77fa898_1 + - libgcc 14.2.0 h77fa898_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 52170 - timestamp: 1724801842101 + size: 54142 + timestamp: 1729027726517 - kind: conda name: libgdal version: 3.9.2 @@ -10462,38 +10557,38 @@ packages: timestamp: 1707328956438 - kind: conda name: libgfortran - version: 14.1.0 + version: 14.2.0 build: h69a702a_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda - sha256: ed77f04f873e43a26e24d443dd090631eedc7d0ace3141baaefd96a123e47535 - md5: 591e631bc1ae62c64f2ab4f66178c097 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda + sha256: fc9e7f22a17faf74da904ebfc4d88699013d2992e55505e4aa0eb01770290977 + md5: f1fd30127802683586f768875127a987 depends: - - libgfortran5 14.1.0 hc5f4f2c_1 + - libgfortran5 14.2.0 hd5240d6_1 constrains: - - libgfortran-ng ==14.1.0=*_1 + - libgfortran-ng ==14.2.0=*_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 52142 - timestamp: 1724801872472 + size: 53997 + timestamp: 1729027752995 - kind: conda name: libgfortran-ng - version: 14.1.0 + version: 14.2.0 build: h69a702a_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda - sha256: a2dc35cb7f87bb5beebf102d4085574c6a740e1df58e743185d4434cc5e4e0ae - md5: 
16cec94c5992d7f42ae3f9fa8b25df8d + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda + sha256: 423f1e2403f0c665748e42d335e421e53fd03c08d457cfb6f360d329d9459851 + md5: 0a7f4cd238267c88e5d69f7826a407eb depends: - - libgfortran 14.1.0 h69a702a_1 + - libgfortran 14.2.0 h69a702a_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 52212 - timestamp: 1724802086021 + size: 54106 + timestamp: 1729027945817 - kind: conda name: libgfortran5 version: 13.2.0 @@ -10514,22 +10609,22 @@ packages: timestamp: 1707328880361 - kind: conda name: libgfortran5 - version: 14.1.0 - build: hc5f4f2c_1 + version: 14.2.0 + build: hd5240d6_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda - sha256: c40d7db760296bf9c776de12597d2f379f30e890b9ae70c1de962ff2aa1999f6 - md5: 10a0cef64b784d6ab6da50ebca4e984d + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda + sha256: d149a37ca73611e425041f33b9d8dbed6e52ec506fe8cc1fc0ee054bddeb6d5d + md5: 9822b874ea29af082e5d36098d25427d depends: - - libgcc >=14.1.0 + - libgcc >=14.2.0 constrains: - - libgfortran 14.1.0 + - libgfortran 14.2.0 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 1459939 - timestamp: 1724801851300 + size: 1462645 + timestamp: 1729027735353 - kind: conda name: libgl version: 1.7.0 @@ -10549,12 +10644,12 @@ packages: timestamp: 1727968620103 - kind: conda name: libglib - version: 2.82.1 + version: 2.82.2 build: h2ff4ddf_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.1-h2ff4ddf_0.conda - sha256: fe9bebb2347d0fc8c5c9e1dd0750e0d640061dc66712a4218bad46d0adc11131 - md5: 47a2209fa0df11797df0b767d1de1275 + url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda + sha256: 49ee9401d483a76423461c50dcd37f91d070efaec7e4dc2828d8cdd2ce694231 + md5: 
13e8e54035ddd2b91875ba399f0f7c04 depends: - __glibc >=2.17,<3.0.a0 - libffi >=3.4,<4.0a0 @@ -10563,55 +10658,55 @@ packages: - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.44,<10.45.0a0 constrains: - - glib 2.82.1 *_0 + - glib 2.82.2 *_0 license: LGPL-2.1-or-later purls: [] - size: 3928640 - timestamp: 1727380513702 + size: 3931898 + timestamp: 1729191404130 - kind: conda name: libglib - version: 2.82.1 - build: h63bbcf2_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.1-h63bbcf2_0.conda - sha256: 9f19b7d33a7f49545fdbd514d2f577e6dc3638b17210c93877b52c021ac5ad22 - md5: 0a17d0518293f31c5495674ad3ab4e89 + version: 2.82.2 + build: h7025463_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.2-h7025463_0.conda + sha256: 7dfbf492b736f8d379f8c3b32a823f0bf2167ff69963e4c940339b146a04c54a + md5: 3e379c1b908a7101ecbc503def24613f depends: - - __osx >=10.13 - libffi >=3.4,<4.0a0 - libiconv >=1.17,<2.0a0 - libintl >=0.22.5,<1.0a0 - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.44,<10.45.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - glib 2.82.1 *_0 + - glib 2.82.2 *_0 license: LGPL-2.1-or-later purls: [] - size: 3729756 - timestamp: 1727380687514 + size: 3810166 + timestamp: 1729192227078 - kind: conda name: libglib - version: 2.82.1 - build: h7025463_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libglib-2.82.1-h7025463_0.conda - sha256: b037bc6a1219f3ab9e3e408edcb0659ac9bae542c50213977e6ade90f70fc62e - md5: f784035a6fcb34f0583ca3bd0dcc6c3b + version: 2.82.2 + build: hb6ef654_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.82.2-hb6ef654_0.conda + sha256: d782be2d8d6784f0b8584ca3cfa93357cddc71b0975560a2bcabd174dac60fff + md5: 2e0511f82f1481210f148e1205fe2482 depends: + - __osx >=10.13 - libffi >=3.4,<4.0a0 - libiconv >=1.17,<2.0a0 - libintl >=0.22.5,<1.0a0 - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.44,<10.45.0a0 - - ucrt 
>=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 constrains: - - glib 2.82.1 *_0 + - glib 2.82.2 *_0 license: LGPL-2.1-or-later purls: [] - size: 3759023 - timestamp: 1727442499076 + size: 3692367 + timestamp: 1729191628049 - kind: conda name: libglvnd version: 1.7.0 @@ -10646,13 +10741,13 @@ packages: timestamp: 1727968607539 - kind: conda name: libgomp - version: 14.1.0 + version: 14.2.0 build: h1383e82_1 build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.1.0-h1383e82_1.conda - sha256: c7c2c51397d57c2e4d48f8676d340ddf88067886f849128ba7d6bd24619dbccc - md5: f8aa80643cd3ff1767ea4e6008ed52d1 + url: https://conda.anaconda.org/conda-forge/win-64/libgomp-14.2.0-h1383e82_1.conda + sha256: d8739b834608f35775209b032f0c2be752ef187863c7ec847afcebe2f681be4e + md5: 9e2d4d1214df6f21cba12f6eff4972f9 depends: - libwinpthread >=12.0.0.r4.gg4f2fc60ca constrains: @@ -10660,33 +10755,32 @@ packages: license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 522202 - timestamp: 1724805108466 + size: 524249 + timestamp: 1729089441747 - kind: conda name: libgomp - version: 14.1.0 + version: 14.2.0 build: h77fa898_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - sha256: c96724c8ae4ee61af7674c5d9e5a3fbcf6cd887a40ad5a52c99aa36f1d4f9680 - md5: 23c255b008c4f2ae008f81edcabaca89 + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda + sha256: 1911c29975ec99b6b906904040c855772ccb265a1c79d5d75c8ceec4ed89cd63 + md5: cc3573974587f12dda90d96e3e55a702 depends: - _libgcc_mutex 0.1 conda_forge license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 460218 - timestamp: 1724801743478 + size: 460992 + timestamp: 1729027639220 - kind: conda name: libgoogle-cloud - version: 2.29.0 - build: h438788a_1 - build_number: 1 + version: 2.30.0 + build: h438788a_0 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.29.0-h438788a_1.conda - sha256: cf5c97fb1a270a072faae6decd7e74681e7ead99a1cec6325c8d7a7213bcb2d1 - md5: 3d27459264de681a74c0aebbbd3ecd8f + url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.30.0-h438788a_0.conda + sha256: 506a0997b586536a6bbe8fd260bd50b625a541850507486fa66abc5a99104bce + md5: ab8466a39822527f7786b0d0b2aac223 depends: - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* @@ -10698,21 +10792,20 @@ packages: - libstdcxx >=13 - openssl >=3.3.2,<4.0a0 constrains: - - libgoogle-cloud 2.29.0 *_1 + - libgoogle-cloud 2.30.0 *_0 license: Apache-2.0 license_family: Apache purls: [] - size: 1200532 - timestamp: 1727245497586 + size: 1200100 + timestamp: 1728022256338 - kind: conda name: libgoogle-cloud - version: 2.29.0 - build: ha00044d_1 - build_number: 1 + version: 2.30.0 + build: ha00044d_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.29.0-ha00044d_1.conda - sha256: 829e30b66305374cef5dfc9c8d90915978b0d4c1caf465c5cc35bdba13c94bcb - md5: e7542181fcc204326558a2d3e9e0b5c2 + url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-2.30.0-ha00044d_0.conda + sha256: 2bc9b941eea49287ada92875734f717e4f24fcf9e55c0cdf2e4ead896ad92931 + md5: 6abd86bf0b053dd2fe698568a3f38821 depends: - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 @@ -10723,21 +10816,20 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 constrains: - - libgoogle-cloud 2.29.0 *_1 + - libgoogle-cloud 2.30.0 *_0 license: Apache-2.0 license_family: Apache purls: [] - size: 14469 - timestamp: 1727246130012 + size: 14593 + timestamp: 1728022894892 - kind: conda name: libgoogle-cloud - version: 2.29.0 - build: hade041e_1 - build_number: 1 + version: 2.30.0 + build: hade041e_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.29.0-hade041e_1.conda - sha256: 87e526f00c24f4e7c3a6cfccb966509599708f29cfcbd6a7238a0306d59e036c - md5: 
0966331f6b1362dc16aebfa76b0c33ab + url: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-2.30.0-hade041e_0.conda + sha256: faf1c644b68306e3a58b471edaaac67f57c193ca543533fe7f29f698e9b9c626 + md5: a431e45b599ada6817543683cf61115c depends: - __osx >=10.13 - libabseil * cxx17* @@ -10748,73 +10840,70 @@ packages: - libprotobuf >=5.27.5,<5.27.6.0a0 - openssl >=3.3.2,<4.0a0 constrains: - - libgoogle-cloud 2.29.0 *_1 + - libgoogle-cloud 2.30.0 *_0 license: Apache-2.0 license_family: Apache purls: [] - size: 864830 - timestamp: 1727245147999 + size: 864474 + timestamp: 1728021663496 - kind: conda name: libgoogle-cloud-storage - version: 2.29.0 - build: h0121fbd_1 - build_number: 1 + version: 2.30.0 + build: h0121fbd_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.29.0-h0121fbd_1.conda - sha256: 78e22048ab9bb554c4269f5e2a4ab9baae2c0f490418e0cdecd04e5c59130805 - md5: ea93fded95ddff7798e28954c446e22f + url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.30.0-h0121fbd_0.conda + sha256: 9fad535d14a204f3646a29f9884c024b69d84120bea5489e14e7dc895b543646 + md5: ad86b6c98964772688298a727cb20ef8 depends: - __glibc >=2.17,<3.0.a0 - libabseil - libcrc32c >=1.1.2,<1.2.0a0 - libcurl - libgcc >=13 - - libgoogle-cloud 2.29.0 h438788a_1 + - libgoogle-cloud 2.30.0 h438788a_0 - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - openssl license: Apache-2.0 license_family: Apache purls: [] - size: 781418 - timestamp: 1727245657213 + size: 782269 + timestamp: 1728022391174 - kind: conda name: libgoogle-cloud-storage - version: 2.29.0 - build: h8126ed0_1 - build_number: 1 + version: 2.30.0 + build: h8126ed0_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.29.0-h8126ed0_1.conda - sha256: 205a2d89c50fa60aa5f61d7b06c6f13a672ad8bfabb3705a234605eb156cf37b - md5: e5c845b99aa7b4d90a6717f2739d622b + url: 
https://conda.anaconda.org/conda-forge/osx-64/libgoogle-cloud-storage-2.30.0-h8126ed0_0.conda + sha256: 104333dad7dc1ea4467f7f14dedab7ed4bdfc664493605767550262e7488bdb3 + md5: fdfef310fda223057e1e4962146ddf97 depends: - __osx >=10.13 - libabseil - libcrc32c >=1.1.2,<1.2.0a0 - libcurl - libcxx >=17 - - libgoogle-cloud 2.29.0 hade041e_1 + - libgoogle-cloud 2.30.0 hade041e_0 - libzlib >=1.3.1,<2.0a0 - openssl license: Apache-2.0 license_family: Apache purls: [] - size: 551487 - timestamp: 1727246090236 + size: 553911 + timestamp: 1728022491695 - kind: conda name: libgoogle-cloud-storage - version: 2.29.0 - build: he5eb982_1 - build_number: 1 + version: 2.30.0 + build: he5eb982_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.29.0-he5eb982_1.conda - sha256: 9134d894877858f10efa2c7102a7f69f9e3a96caa1f2c4097c45cde41dcc9fe8 - md5: 6d9b4c7bcb190d7ca32531b6504d50b7 + url: https://conda.anaconda.org/conda-forge/win-64/libgoogle-cloud-storage-2.30.0-he5eb982_0.conda + sha256: 2bc1e02125d7a2ca86debc5c7580f3027472439739effc10d96960285593b7de + md5: 116f6a285dbe98e6d4126a88de2878dd depends: - libabseil - libcrc32c >=1.1.2,<1.2.0a0 - libcurl - - libgoogle-cloud 2.29.0 ha00044d_1 + - libgoogle-cloud 2.30.0 ha00044d_0 - libzlib >=1.3.1,<2.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -10822,8 +10911,8 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 14354 - timestamp: 1727246493318 + size: 14456 + timestamp: 1728023196706 - kind: conda name: libgrpc version: 1.65.5 @@ -10837,7 +10926,7 @@ packages: - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - libprotobuf >=5.27.5,<5.27.6.0a0 - - libre2-11 >=2023.9.1,<2024.0a0 + - libre2-11 >=2023.9.1 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.2,<4.0a0 - re2 @@ -10866,7 +10955,7 @@ packages: - libabseil >=20240722.0,<20240723.0a0 - libcxx >=17 - libprotobuf >=5.27.5,<5.27.6.0a0 - - libre2-11 >=2023.9.1,<2024.0a0 + - libre2-11 >=2023.9.1 - libzlib >=1.3.1,<2.0a0 - 
openssl >=3.3.2,<4.0a0 - re2 @@ -10892,7 +10981,7 @@ packages: - libabseil >=20240722.0,<20240723.0a0 - libgcc >=13 - libprotobuf >=5.27.5,<5.27.6.0a0 - - libre2-11 >=2023.9.1,<2024.0a0 + - libre2-11 >=2023.9.1 - libstdcxx >=13 - libzlib >=1.3.1,<2.0a0 - openssl >=3.3.2,<4.0a0 @@ -11190,12 +11279,12 @@ packages: timestamp: 1690557554081 - kind: conda name: libllvm19 - version: 19.1.1 + version: 19.1.2 build: ha7bfdaf_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.1-ha7bfdaf_0.conda - sha256: 11168659796f5cfe02a0db918ee1596e9dcda8a32564b82f429a56af98fff4c9 - md5: 000cd5fc23967c97284b720cc6049c1e + url: https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.2-ha7bfdaf_0.conda + sha256: 8c0eb8f753ef2a449acd846bc5853f7f11d319819bb5bbdf721c8ac0d8db875a + md5: 128e74a4f8f4fef4dc5130a8bbccc15d depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -11206,8 +11295,8 @@ packages: license: Apache-2.0 WITH LLVM-exception license_family: Apache purls: [] - size: 40126224 - timestamp: 1727867289123 + size: 40136241 + timestamp: 1729031844469 - kind: conda name: libnetcdf version: 4.9.2 @@ -11442,62 +11531,65 @@ packages: - kind: conda name: libparquet version: 17.0.0 - build: h59f2d37_20_cpu - build_number: 20 + build: h59f2d37_22_cpu + build_number: 22 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_20_cpu.conda - sha256: 3ba47daa2954169a2da257c310d39788d81b2c42b6a7ba909f9f1661b5567a16 - md5: 430e46b30775e1db5d7d234489bf34b2 + url: https://conda.anaconda.org/conda-forge/win-64/libparquet-17.0.0-h59f2d37_22_cpu.conda + sha256: cf5c5735ce78b11824b545c93bcab09db646f4c43ac5767a35e8aa463777ed4d + md5: 9be6b1ecd2eb73dae9f449aae23850a2 depends: - - libarrow 17.0.0 h297d146_20_cpu + - libarrow 17.0.0 ha019072_22_cpu - libthrift >=0.21.0,<0.21.1.0a0 - openssl >=3.3.2,<4.0a0 - ucrt >=10.0.20348.0 - vc >=14.3,<15 - vc14_runtime >=14.40.33810 license: Apache-2.0 + license_family: APACHE purls: [] 
- size: 801534 - timestamp: 1728535891220 + size: 802078 + timestamp: 1728882416106 - kind: conda name: libparquet version: 17.0.0 - build: h6bd9018_20_cpu - build_number: 20 + build: h6bd9018_22_cpu + build_number: 22 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_20_cpu.conda - sha256: dfcf9a25e3f687574fbb05c8f14288c3f621a56801a035edb1302d21ec969d7d - md5: 38587a1c1d6657c2998af72ca28e10ed + url: https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h6bd9018_22_cpu.conda + sha256: c81bd947e16dc8106a361c8099b9f3739c89a5cff32ba04b55bc6c3e16b9a969 + md5: 48c058a044a8d1bfd38153d054c2a911 depends: - __glibc >=2.17,<3.0.a0 - - libarrow 17.0.0 h364f349_20_cpu + - libarrow 17.0.0 ha07344c_22_cpu - libgcc >=13 - libstdcxx >=13 - libthrift >=0.21.0,<0.21.1.0a0 - openssl >=3.3.2,<4.0a0 license: Apache-2.0 + license_family: APACHE purls: [] - size: 1188248 - timestamp: 1728534858689 + size: 1188385 + timestamp: 1728881685985 - kind: conda name: libparquet version: 17.0.0 - build: hc957f30_20_cpu - build_number: 20 + build: hc957f30_22_cpu + build_number: 22 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_20_cpu.conda - sha256: fc496df895106ce56f2f951844bb8177734c197fd904d62549b7c75c1d666dcf - md5: 2d9be33ce40ab5506f47931dcb4e4929 + url: https://conda.anaconda.org/conda-forge/osx-64/libparquet-17.0.0-hc957f30_22_cpu.conda + sha256: 547a7442d4ddef70b1a125b5055e1bb0a380eec6d52b336fea2897e590f286a8 + md5: a101bae7bdbb180dca988dcb645d7a29 depends: - __osx >=10.13 - - libarrow 17.0.0 h74c0fbd_20_cpu + - libarrow 17.0.0 h3475b9b_22_cpu - libcxx >=18 - libthrift >=0.21.0,<0.21.1.0a0 - openssl >=3.3.2,<4.0a0 license: Apache-2.0 + license_family: APACHE purls: [] - size: 926343 - timestamp: 1728534954359 + size: 926933 + timestamp: 1728882411492 - kind: conda name: libpciaccess version: '0.18' @@ -11564,12 +11656,12 @@ packages: - kind: conda name: libpq version: '17.0' - 
build: h04577a9_2 - build_number: 2 + build: h04577a9_4 + build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_2.conda - sha256: 48ac53293aba8f8590c16b530def8434033f1f08fe4eaaa897756563b50da7cd - md5: c00807c15530f0cb373a89fd5ead6599 + url: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda + sha256: 2f7e72e32f495cfb0492b8091d97dbe1c0700428fe167f3a781bb46e88dee4e5 + md5: 392cae2a58fbcb9db8c2147c6d6d1620 depends: - __glibc >=2.17,<3.0.a0 - icu >=75.1,<76.0a0 @@ -11579,47 +11671,47 @@ packages: - openssl >=3.3.2,<4.0a0 license: PostgreSQL purls: [] - size: 2621446 - timestamp: 1727852819478 + size: 2602277 + timestamp: 1729085182543 - kind: conda name: libpq version: '17.0' - build: h6e894e8_2 - build_number: 2 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-h6e894e8_2.conda - sha256: 103ac6f8b717ba11faf9e158321a81998d69244428fc75fa220ba269dc6222a0 - md5: c95c34362d0f7b12f9d691d378bcc902 + build: h7ec079e_4 + build_number: 4 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_4.conda + sha256: 6fce0f994714a6ca46209abd068843abadc64f36eb29aa3024b89f9b5f006783 + md5: 22f654ece7d6c2b4d543b5d73d7f0481 depends: - - __osx >=10.13 - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - openldap >=2.6.8,<2.7.0a0 - openssl >=3.3.2,<4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: PostgreSQL purls: [] - size: 2523581 - timestamp: 1727853063006 + size: 3814439 + timestamp: 1729085915122 - kind: conda name: libpq version: '17.0' - build: h7ec079e_2 - build_number: 2 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libpq-17.0-h7ec079e_2.conda - sha256: e28e770d22a72966d06f84e060eae1cb027a750e583b8e95f6a02568b074e4cd - md5: c3048e5c8f6b907005e5d2bf5ecbd1a6 + build: ha324e28_4 + build_number: 4 + subdir: osx-64 + url: 
https://conda.anaconda.org/conda-forge/osx-64/libpq-17.0-ha324e28_4.conda + sha256: b0dff338b01da6fe75fddd918579791121f09c855ce95cf3dfcd10049e75b10b + md5: 2741337667380a37f9cfcafc1fce0ec4 depends: + - __osx >=10.13 - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 + - openldap >=2.6.8,<2.7.0a0 - openssl >=3.3.2,<4.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: PostgreSQL purls: [] - size: 3834481 - timestamp: 1727853577777 + size: 2465943 + timestamp: 1729085510560 - kind: conda name: libprotobuf version: 5.27.5 @@ -11684,12 +11776,13 @@ packages: timestamp: 1727424307861 - kind: conda name: libre2-11 - version: 2023.11.01 - build: h4eb7d71_0 + version: 2024.07.02 + build: h4eb7d71_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2023.11.01-h4eb7d71_0.conda - sha256: a0a1ea69a63357b1aac9d8eda6b98425fc2bb5ae9650233a41455e9290b1aa05 - md5: 985ae0f827e74151966f2977c1cbe67d + url: https://conda.anaconda.org/conda-forge/win-64/libre2-11-2024.07.02-h4eb7d71_1.conda + sha256: 39908d18620d48406ea3492bf111eface5b3a88c1a2d166c6d513b03f450df5d + md5: d8dbfb066c8e3e85439687613d32057d depends: - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 @@ -11697,20 +11790,21 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 constrains: - - re2 2023.11.01.* + - re2 2024.07.02.* license: BSD-3-Clause license_family: BSD purls: [] - size: 262162 - timestamp: 1728466651480 + size: 260860 + timestamp: 1728779502416 - kind: conda name: libre2-11 - version: 2023.11.01 - build: hbbce691_0 + version: 2024.07.02 + build: hbbce691_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.11.01-hbbce691_0.conda - sha256: e06eb02be1dd3a03bb9e2897c2067e26dc32a526a7fdf1f3b1ba3557b3f168d2 - md5: 18ca8e125b84679e416ba2803ff4356e + url: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2024.07.02-hbbce691_1.conda + sha256: 
f8ad6a4f6d4fd54ebe3e5e712a01e663222fc57f49d16b6b8b10c30990dafb8f + md5: 2124de47357b7a516c0a3efd8f88c143 depends: - __glibc >=2.17,<3.0.a0 - libabseil * cxx17* @@ -11718,32 +11812,33 @@ packages: - libgcc >=13 - libstdcxx >=13 constrains: - - re2 2023.11.01.* + - re2 2024.07.02.* license: BSD-3-Clause license_family: BSD purls: [] - size: 240866 - timestamp: 1728466437052 + size: 211096 + timestamp: 1728778964655 - kind: conda name: libre2-11 - version: 2023.11.01 - build: hd530cb8_0 + version: 2024.07.02 + build: hd530cb8_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2023.11.01-hd530cb8_0.conda - sha256: d25b112d4384c269fedb04c420ab944fced743ad6f85d2cedac137a86e455e77 - md5: 40e38687b043af900e79f0a10c54a0ca + url: https://conda.anaconda.org/conda-forge/osx-64/libre2-11-2024.07.02-hd530cb8_1.conda + sha256: 2fac39fb704ded9584d1a9e7511163830016803f83852a724c2ccef1cc16e17b + md5: 1e14c67a5e8a9273a98b83fbc0905b99 depends: - __osx >=10.13 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - libcxx >=17 constrains: - - re2 2023.11.01.* + - re2 2024.07.02.* license: BSD-3-Clause license_family: BSD purls: [] - size: 185921 - timestamp: 1728466515046 + size: 178580 + timestamp: 1728779037721 - kind: conda name: librttopo version: 1.1.0 @@ -12029,36 +12124,36 @@ packages: timestamp: 1685837820566 - kind: conda name: libstdcxx - version: 14.1.0 + version: 14.2.0 build: hc0a3c3a_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda - sha256: 44decb3d23abacf1c6dd59f3c152a7101b7ca565b4ef8872804ceaedcc53a9cd - md5: 9dbb9699ea467983ba8a4ba89b08b066 + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda + sha256: 4661af0eb9bdcbb5fb33e5d0023b001ad4be828fccdcc56500059d56f9869462 + md5: 234a5554c53625688d51062645337328 depends: - - libgcc 14.1.0 h77fa898_1 + - libgcc 14.2.0 h77fa898_1 license: GPL-3.0-only WITH 
GCC-exception-3.1 license_family: GPL purls: [] - size: 3892781 - timestamp: 1724801863728 + size: 3893695 + timestamp: 1729027746910 - kind: conda name: libstdcxx-ng - version: 14.1.0 + version: 14.2.0 build: h4852527_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda - sha256: a2dc44f97290740cc187bfe94ce543e6eb3c2ea8964d99f189a1d8c97b419b8c - md5: bd2598399a70bb86d8218e95548d735e + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda + sha256: 25bb30b827d4f6d6f0522cc0579e431695503822f144043b93c50237017fffd8 + md5: 8371ac6457591af2cf6159439c1fd051 depends: - - libstdcxx 14.1.0 hc0a3c3a_1 + - libstdcxx 14.2.0 hc0a3c3a_1 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 52219 - timestamp: 1724801897766 + size: 54105 + timestamp: 1729027780628 - kind: conda name: libthrift version: 0.21.0 @@ -12636,20 +12731,21 @@ packages: timestamp: 1727963183990 - kind: conda name: llvm-openmp - version: 19.1.1 - build: h545e0da_0 + version: 19.1.2 + build: hf78d878_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.1-h545e0da_0.conda - sha256: 7e15f5ac89e750dadbc6fe81dc2909dd056c7324c72379a8440b57a6174a1146 - md5: 3f3e4a599dd2638a945fc5821090db07 + url: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-19.1.2-hf78d878_0.conda + sha256: 92231d391886bca0c0dabb42f02a37e7acb8ea84399843173fe8c294814735dd + md5: ca5f963676a9ad5383b7441368e1d107 depends: - __osx >=10.13 constrains: - - openmp 19.1.1|19.1.1.* + - openmp 19.1.2|19.1.2.* license: Apache-2.0 WITH LLVM-exception + license_family: APACHE purls: [] - size: 305199 - timestamp: 1728517141555 + size: 305589 + timestamp: 1729145249496 - kind: conda name: llvmlite version: 0.43.0 @@ -12936,13 +13032,12 @@ packages: timestamp: 1686175179621 - kind: conda name: markupsafe - version: 3.0.1 - build: py312h178313f_1 - build_number: 1 + version: 3.0.2 
+ build: py312h178313f_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.1-py312h178313f_1.conda - sha256: d65455297e005c73811848fb3b25a9570d5712c972c7302198ca72698f5f5341 - md5: 490afd4d3627a7f999b9d633c4b6c229 + url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda + sha256: 15f14ab429c846aacd47fada0dc4f341d64491e097782830f0906d00cb7b48b6 + md5: a755704ea0e2503f8c227d84829a8e81 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -12954,17 +13049,16 @@ packages: license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 24814 - timestamp: 1728489245950 + size: 24878 + timestamp: 1729351558563 - kind: conda name: markupsafe - version: 3.0.1 - build: py312h31fea79_1 - build_number: 1 + version: 3.0.2 + build: py312h31fea79_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.1-py312h31fea79_1.conda - sha256: 128416033353673a4e8094e39134dcc5be5ff6ff05b8eb48467d7bfd18cbe354 - md5: 512dc4954c7fe66ed5c2c622a3eb745a + url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_0.conda + sha256: eb0f3768890291f2d5fb666ab31b32b37a821e4a30968c6b3cd332472957abe7 + md5: e2ff001440760f2cbac24765d8a3d84a depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -12977,17 +13071,16 @@ packages: license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 27734 - timestamp: 1728489618033 + size: 27358 + timestamp: 1729351504449 - kind: conda name: markupsafe - version: 3.0.1 - build: py312hca98d7e_1 - build_number: 1 + version: 3.0.2 + build: py312hbe3f5e4_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.1-py312hca98d7e_1.conda - sha256: 848e8be9b74d20a90c120f0f3332df5baad07514edab44e4e8e561b89fbf261b - md5: 136bab776f3fb3ccc6e11f95cf71e658 + url: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.2-py312hbe3f5e4_0.conda + sha256: 
b2fb54718159055fdf89da7d9f0c6743ef84b31960617a56810920d17616d944 + md5: c6238833d7dc908ec295bc490b80d845 depends: - __osx >=10.13 - python >=3.12,<3.13.0a0 @@ -12998,8 +13091,8 @@ packages: license_family: BSD purls: - pkg:pypi/markupsafe?source=hash-mapping - size: 23791 - timestamp: 1728489239217 + size: 23889 + timestamp: 1729351468966 - kind: conda name: matplotlib version: 3.9.2 @@ -13409,59 +13502,59 @@ packages: timestamp: 1600387789153 - kind: conda name: mypy - version: 1.11.2 - build: py312h4389bb4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/mypy-1.11.2-py312h4389bb4_0.conda - sha256: 31d0292518c3c3090af632bc06ffa5f331fa6969ad9ae219e6505a6b2219d0af - md5: dd2e469b2e2f8a1cc4ae749a7ed44b7f + version: 1.12.0 + build: py312h3d0f464_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.12.0-py312h3d0f464_0.conda + sha256: b5f712928261d5902321b9ba741aed44b3cb9d7f5ac5ec8b2050e4621a2796b2 + md5: f2d8a64c06a7331f936dde72ed1bd7ed depends: + - __osx >=10.13 - mypy_extensions >=1.0.0 - psutil >=4.0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - typing_extensions >=4.1.0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/mypy?source=hash-mapping - size: 8560830 - timestamp: 1724602058839 + size: 12278325 + timestamp: 1728976350202 - kind: conda name: mypy - version: 1.11.2 - build: py312h66e93f0_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda - sha256: aadb78145f51b5488806c86e5954cc3cb19b03f2297a464b2a2f27c0340332a8 - md5: ea315027e648236653f27d3d1ae893f6 + version: 1.12.0 + build: py312h4389bb4_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/mypy-1.12.0-py312h4389bb4_0.conda + sha256: e2a2e078766856a9c579251336085b23b4d46e53fe53f76d2e547ba982939a2c + md5: 368526721c0f74e08e343d5d8011251b depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=13 - 
mypy_extensions >=1.0.0 - psutil >=4.0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - typing_extensions >=4.1.0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT purls: - pkg:pypi/mypy?source=hash-mapping - size: 17066588 - timestamp: 1724602213195 + size: 10125280 + timestamp: 1728975960981 - kind: conda name: mypy - version: 1.11.2 - build: py312hb553811_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.11.2-py312hb553811_0.conda - sha256: 99eced54663f6cf2b8b924f36bc2fc0317075d8bd3c38c47fff55e463687fb04 - md5: 4e22f7fed8b0572fa5d1b12e7a39a570 + version: 1.12.0 + build: py312h66e93f0_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.12.0-py312h66e93f0_0.conda + sha256: 8725ef1244551cb1db0b1bfd887a54609aa7a0d9953163e74635220ad7a6f172 + md5: 8f982700057360f0950ea2221227174b depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - mypy_extensions >=1.0.0 - psutil >=4.0 - python >=3.12,<3.13.0a0 @@ -13471,8 +13564,8 @@ packages: license_family: MIT purls: - pkg:pypi/mypy?source=hash-mapping - size: 10502065 - timestamp: 1724601972090 + size: 18806239 + timestamp: 1728976530322 - kind: conda name: mypy_extensions version: 1.0.0 @@ -13660,27 +13753,26 @@ packages: timestamp: 1705850780510 - kind: conda name: networkx - version: '3.3' - build: pyhd8ed1ab_1 - build_number: 1 + version: 3.4.1 + build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda - sha256: cbd8a6de87ad842e7665df38dcec719873fe74698bc761de5431047b8fada41a - md5: d335fd5704b46f4efb89a6774e81aef0 + url: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda + sha256: f753c9a2be8ad02077f027f4e03d9531b305c5297d3708c410cf95b99195b335 + md5: 4994669899eb2e84ab855edcb71efc58 depends: - python >=3.10 constrains: - - pandas >=1.4 - - numpy >=1.22 - - matplotlib >=3.5 - - 
scipy >=1.9,!=1.11.0,!=1.11.1 + - scipy >=1.10,!=1.11.0,!=1.11.1 + - numpy >=1.24 + - matplotlib >=3.7 + - pandas >=2.0 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/networkx?source=hash-mapping - size: 1185670 - timestamp: 1712540499262 + size: 1197665 + timestamp: 1728721145589 - kind: conda name: nodeenv version: 1.9.1 @@ -13880,13 +13972,13 @@ packages: timestamp: 1718888811663 - kind: conda name: numba_celltree - version: 0.2.1 + version: 0.2.2 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.1-pyhd8ed1ab_0.conda - sha256: 84d3c63a3a58c2039a50738be391b3401e148b5a39911a191acbffc680d3313a - md5: bcc32246fc51d0ca5893fea58314d630 + url: https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.2.2-pyhd8ed1ab_0.conda + sha256: c71b84ddb772ec3d242b5d6705677100b44fb824a670ff5ad49c0614527ce4c2 + md5: 40b8e3c1e26ce871c71b9be2045e07ce depends: - numba >=0.50 - numpy @@ -13895,8 +13987,8 @@ packages: license_family: MIT purls: - pkg:pypi/numba-celltree?source=hash-mapping - size: 33709 - timestamp: 1727983962954 + size: 34627 + timestamp: 1729059997869 - kind: conda name: numpy version: 2.0.2 @@ -14398,6 +14490,7 @@ packages: - python >=3.9 - types-pytz >=2022.1.1 license: BSD-3-Clause + license_family: BSD purls: - pkg:pypi/pandas-stubs?source=hash-mapping size: 98654 @@ -14535,6 +14628,23 @@ packages: - pkg:pypi/partd?source=hash-mapping size: 20884 timestamp: 1715026639309 +- kind: conda + name: pathspec + version: 0.12.1 + build: pyhd8ed1ab_0 + subdir: noarch + noarch: python + url: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda + sha256: 4e534e66bfe8b1e035d2169d0e5b185450546b17e36764272863e22e0370be4d + md5: 17064acba08d3686f1135b5ec1b32b12 + depends: + - python >=3.7 + license: MPL-2.0 + license_family: MOZILLA + purls: + - pkg:pypi/pathspec?source=hash-mapping + size: 41173 + timestamp: 1702250135032 - kind: conda name: pcre2 version: 
'10.44' @@ -14644,51 +14754,47 @@ packages: timestamp: 1602536313357 - kind: conda name: pillow - version: 10.4.0 - build: py312h381445a_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pillow-10.4.0-py312h381445a_1.conda - sha256: 0b52e708ac4b72e6e1608de517cd4c8e6517dd525e23163a69bf73c7261399fc - md5: c57e54ae4acca720fb3a44bee93cb5b9 + version: 11.0.0 + build: py312h66fe14f_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/pillow-11.0.0-py312h66fe14f_0.conda + sha256: 5e531eded0bb784c745abe3a1187c6c33478e153755bf8a8496aebff60801150 + md5: 1e49b81b5aae7af9d74bcdac0cd0d174 depends: + - __osx >=10.13 - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.8.0a0 + - libtiff >=4.7.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - - libxcb >=1.16,<2.0.0a0 + - libxcb >=1.17.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.2,<3.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: HPND purls: - pkg:pypi/pillow?source=hash-mapping - size: 42468305 - timestamp: 1726075694989 + size: 42189378 + timestamp: 1729065985392 - kind: conda name: pillow - version: 10.4.0 - build: py312h56024de_1 - build_number: 1 + version: 11.0.0 + build: py312h7b63e92_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h56024de_1.conda - sha256: a0961e7ff663d4c7a82478ff45fba72a346070f2a017a9b56daff279c0dbb8e2 - md5: 4bd6077376c7f9c1ce33fd8319069e5b + url: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda + sha256: 13a464bea02c0df0199c20ef6bad24a6bc336aaf55bf8d6a133d0fe664463224 + md5: 385f46a4df6f97892503a841121a9acf depends: - __glibc >=2.17,<3.0.a0 - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - libgcc >=13 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.8.0a0 + - libtiff >=4.7.0,<4.8.0a0 - libwebp-base 
>=1.4.0,<2.0a0 - - libxcb >=1.16,<2.0.0a0 + - libxcb >=1.17.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.2,<3.0a0 - python >=3.12,<3.13.0a0 @@ -14697,35 +14803,36 @@ packages: license: HPND purls: - pkg:pypi/pillow?source=hash-mapping - size: 42689452 - timestamp: 1726075285193 + size: 41948418 + timestamp: 1729065846594 - kind: conda name: pillow - version: 10.4.0 - build: py312h683ea77_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.4.0-py312h683ea77_1.conda - sha256: 1e8d489190aa0b4682f52468efe4db46b37e50679c64879696e42578c9a283a4 - md5: fb17ec3065f089dad64d9b597b1e8ce4 + version: 11.0.0 + build: py312ha41cd45_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pillow-11.0.0-py312ha41cd45_0.conda + sha256: 8802bcab3b587cec7dfa8e6a82e9851d16dffff64052282d993adf2d1cade0ef + md5: 812f37d90c99f24705d2db3091c9c29c depends: - - __osx >=10.13 - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - libjpeg-turbo >=3.0.0,<4.0a0 - - libtiff >=4.6.0,<4.8.0a0 + - libtiff >=4.7.0,<4.8.0a0 - libwebp-base >=1.4.0,<2.0a0 - - libxcb >=1.16,<2.0.0a0 + - libxcb >=1.17.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - openjpeg >=2.5.2,<3.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: HPND purls: - pkg:pypi/pillow?source=hash-mapping - size: 42329265 - timestamp: 1726075276862 + size: 41230881 + timestamp: 1729066337278 - kind: conda name: pip version: '24.2' @@ -15000,18 +15107,18 @@ packages: - kind: conda name: postgresql version: '17.0' - build: h1122569_2 - build_number: 2 + build: h1122569_4 + build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_2.conda - sha256: dcc5bc7cfda64d43541f2b22542df4d83f0c55790c982bae5349cec76e824bd9 - md5: ce0728ee5b5a1a7b06d947cbfd72aab0 + url: 
https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_4.conda + sha256: 83565b4966d86d39b8628f9137d0918fac8ae2f3241a48da145be444426ed057 + md5: 028ea131f116f13bb2a4a382b5863a04 depends: - __glibc >=2.17,<3.0.a0 - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - libgcc >=13 - - libpq 17.0 h04577a9_2 + - libpq 17.0 h04577a9_4 - libxml2 >=2.12.7,<3.0a0 - libxslt >=1.1.39,<2.0a0 - libzlib >=1.3.1,<2.0a0 @@ -15024,22 +15131,22 @@ packages: - zstd >=1.5.6,<1.6.0a0 license: PostgreSQL purls: [] - size: 5537219 - timestamp: 1727852846436 + size: 5553442 + timestamp: 1729085209688 - kind: conda name: postgresql version: '17.0' - build: h13864a0_2 - build_number: 2 + build: h88d1a9c_4 + build_number: 4 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h13864a0_2.conda - sha256: 5d47afd1c0aa5b24965f41c5edb0ba65787891b58bc22addeec3cfa6a8c21c9f - md5: 88defb9d344f0d39805f8c819f211f79 + url: https://conda.anaconda.org/conda-forge/osx-64/postgresql-17.0-h88d1a9c_4.conda + sha256: 0088b9848988d6cb448d5d5c5888f3179bc429c50548814c7d60e8489d2c5b68 + md5: 3a7fd43a8762553353c8f9445518f218 depends: - __osx >=10.13 - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - libpq 17.0 h6e894e8_2 + - libpq 17.0 ha324e28_4 - libxml2 >=2.12.7,<3.0a0 - libxslt >=1.1.39,<2.0a0 - libzlib >=1.3.1,<2.0a0 @@ -15052,21 +15159,21 @@ packages: - zstd >=1.5.6,<1.6.0a0 license: PostgreSQL purls: [] - size: 4823803 - timestamp: 1727853241659 + size: 4825798 + timestamp: 1729085791983 - kind: conda name: postgresql version: '17.0' - build: heca7946_2 - build_number: 2 + build: heca7946_4 + build_number: 4 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_2.conda - sha256: 54c66acd351fae8aee1ae718c7474b212f9cb3181c492377b4aa632e174758e2 - md5: 6cca6d21f8c09ff8d860d02464f9b0fd + url: https://conda.anaconda.org/conda-forge/win-64/postgresql-17.0-heca7946_4.conda + sha256: 
621ed28d32d6cfe727ed28893dc4d1f80859d50c596b7b4b2eec269f84b0924a + md5: c8d62d44f513fd07a3496f96ea470957 depends: - icu >=75.1,<76.0a0 - krb5 >=1.21.3,<1.22.0a0 - - libpq 17.0 h7ec079e_2 + - libpq 17.0 h7ec079e_4 - libxml2 >=2.12.7,<3.0a0 - libxslt >=1.1.39,<2.0a0 - libzlib >=1.3.1,<2.0a0 @@ -15078,8 +15185,8 @@ packages: - zstd >=1.5.6,<1.6.0a0 license: PostgreSQL purls: [] - size: 4317261 - timestamp: 1727853708062 + size: 4320392 + timestamp: 1729086048634 - kind: conda name: pre-commit version: 4.0.1 @@ -15210,63 +15317,63 @@ packages: - kind: conda name: psutil version: 6.0.0 - build: py312h4389bb4_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_1.conda - sha256: fc16b9c6a511a6c127d7d6b973771be14266aaa8a3069abbf0b70727e1ab8394 - md5: 6847f7375068f9ef7d22ca7cb1055f31 + build: py312h3d0f464_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312h3d0f464_2.conda + sha256: 10d4040306ffc5898f9f2fbf3b186407228000663bc9d81a1d5a81a25948b347 + md5: 594bcf193fa2332ab54695f889ff3d8f depends: + - __osx >=10.13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/psutil?source=hash-mapping - size: 506867 - timestamp: 1725738313194 + size: 498230 + timestamp: 1728965362876 - kind: conda name: psutil version: 6.0.0 - build: py312h66e93f0_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda - sha256: fae2f63dd668ab2e7b2813f826508ae2c83f43577eeef5acf304f736b327c5be - md5: 76706c73e315d21bede804514a39bccf + build: py312h4389bb4_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/psutil-6.0.0-py312h4389bb4_2.conda + sha256: 64089cd1055c24842b5fac385a3e92fb470a11502ea5fae1e40bc8be0c356738 + md5: 
978c54a1e851f105a4f088b8cba75cc5 depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/psutil?source=hash-mapping - size: 493021 - timestamp: 1725738009896 + size: 507955 + timestamp: 1728965760353 - kind: conda name: psutil version: 6.0.0 - build: py312hb553811_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/psutil-6.0.0-py312hb553811_1.conda - sha256: ac711ad735ebfe9bc01d0d2c11ef56fe3f5a4e2499774b5e46eac44749adece7 - md5: b2395d1f7ceb250b13b65bd13c5558a2 + build: py312h66e93f0_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_2.conda + sha256: 918541e61922738259671f76f0eedd0b4c1f62f4f2e22a828da36597e32fcf6c + md5: e6d115113d912f9c2cc8cddddac20d61 depends: - - __osx >=10.13 + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause license_family: BSD purls: - pkg:pypi/psutil?source=hash-mapping - size: 499530 - timestamp: 1725737996873 + size: 492697 + timestamp: 1728965314890 - kind: conda name: pthread-stubs version: '0.4' @@ -15497,24 +15604,6 @@ packages: - pkg:pypi/pyarrow?source=hash-mapping size: 4645745 timestamp: 1722487499158 -- kind: conda - name: pyarrow-hotfix - version: '0.6' - build: pyhd8ed1ab_0 - subdir: noarch - noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda - sha256: 9b767969d059c106aac6596438a7e7ebd3aa1e2ff6553d4b7e05126dfebf4bd6 - md5: ccc06e6ef2064ae129fab3286299abda - depends: - - pyarrow >=0.14 - - python >=3.5 - license: Apache-2.0 - license_family: APACHE - purls: - - pkg:pypi/pyarrow-hotfix?source=hash-mapping - size: 13567 - timestamp: 1700596511761 - kind: conda name: pycparser version: '2.22' @@ -15745,21 +15834,22 @@ 
packages: timestamp: 1727771812389 - kind: conda name: pyparsing - version: 3.1.4 - build: pyhd8ed1ab_0 + version: 3.2.0 + build: pyhd8ed1ab_1 + build_number: 1 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda - sha256: 8714a83f1aeac278b3eb33c7cb880c95c9a5924e7a5feeb9e87e7d0837afa085 - md5: 4d91352a50949d049cf9714c8563d433 + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda + sha256: b846e3965cd106438cf0b9dc0de8d519670ac065f822a7d66862e9423e0229cb + md5: 035c17fbf099f50ff60bf2eb303b0a83 depends: - - python >=3.6 + - python >=3.9 license: MIT license_family: MIT purls: - pkg:pypi/pyparsing?source=hash-mapping - size: 90129 - timestamp: 1724616224956 + size: 92444 + timestamp: 1728880549923 - kind: conda name: pyproj version: 3.7.0 @@ -15825,21 +15915,21 @@ packages: timestamp: 1727795528667 - kind: conda name: pyside6 - version: 6.7.3 + version: 6.8.0 build: py312h2ee7485_1 build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.7.3-py312h2ee7485_1.conda - sha256: e3f3c1eb70a051f06c0598ac70fb9d5e770a116a56b399d48f4ce22a2c169e89 - md5: d55a97c0263d3f780726a76ae18498cc + url: https://conda.anaconda.org/conda-forge/win-64/pyside6-6.8.0-py312h2ee7485_1.conda + sha256: 1615b5acc08119c790529726cce19d3d2335ccaf0890714f07d597d6e8045212 + md5: 31d312fbe153d77e3d0a47a974d961ad depends: - - libclang13 >=19.1.0 + - libclang13 >=19.1.1 - libxml2 >=2.12.7,<3.0a0 - libxslt >=1.1.39,<2.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - qt6-main 6.7.3.* - - qt6-main >=6.7.3,<6.8.0a0 + - qt6-main 6.8.0.* + - qt6-main >=6.8.0,<6.9.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 @@ -15848,20 +15938,20 @@ packages: purls: - pkg:pypi/pyside6?source=hash-mapping - pkg:pypi/shiboken6?source=hash-mapping - size: 9211882 - timestamp: 1727987852185 + size: 9742987 + timestamp: 1729034841941 - kind: conda name: pyside6 
- version: 6.7.3 + version: 6.8.0 build: py312h91f0f75_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.7.3-py312h91f0f75_1.conda - sha256: e9d26444e4a554a71e885017898b101d388855277b6604f3235e50b63cc66fe0 - md5: 64a74d686fd29fa04c4c313a688e2421 + url: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.8.0-py312h91f0f75_1.conda + sha256: 8b0aec3390f948bf4c9b87365f44e5b39cfc53f047571dbb43e7cbbdb195258a + md5: 81abe3bd7285eec2fe288045043fe419 depends: - __glibc >=2.17,<3.0.a0 - - libclang13 >=19.1.0 + - libclang13 >=19.1.1 - libegl >=1.7.0,<2.0a0 - libgcc >=13 - libgl >=1.7.0,<2.0a0 @@ -15871,15 +15961,15 @@ packages: - libxslt >=1.1.39,<2.0a0 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - - qt6-main 6.7.3.* - - qt6-main >=6.7.3,<6.8.0a0 + - qt6-main 6.8.0.* + - qt6-main >=6.8.0,<6.9.0a0 license: LGPL-3.0-only license_family: LGPL purls: - pkg:pypi/pyside6?source=hash-mapping - pkg:pypi/shiboken6?source=hash-mapping - size: 10458409 - timestamp: 1727987584620 + size: 10802906 + timestamp: 1729034892066 - kind: conda name: pysocks version: 1.7.1 @@ -16207,15 +16297,15 @@ packages: timestamp: 1706886944988 - kind: conda name: pywin32 - version: '306' - build: py312h53d5487_2 - build_number: 2 + version: '307' + build: py312h275cf98_3 + build_number: 3 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pywin32-306-py312h53d5487_2.conda - sha256: d0ff1cd887b626a125f8323760736d8fab496bf2a400e825cce55361e7631264 - md5: f44c8f35c3f99eca30d6f5b68ddb0f42 + url: https://conda.anaconda.org/conda-forge/win-64/pywin32-307-py312h275cf98_3.conda + sha256: 68f8781b83942b91dbc0df883f9edfd1a54a1e645ae2a97c48203ff6c2919de3 + md5: 1747fbbdece8ab4358b584698b19c44d depends: - - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 - ucrt >=10.0.20348.0 - vc >=14.2,<15 @@ -16224,17 +16314,16 @@ packages: license_family: PSF purls: - pkg:pypi/pywin32?source=hash-mapping - size: 
6127499 - timestamp: 1695974557413 + size: 6032183 + timestamp: 1728636767192 - kind: conda name: pywinpty - version: 2.0.13 - build: py312h275cf98_1 - build_number: 1 + version: 2.0.14 + build: py312h275cf98_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.13-py312h275cf98_1.conda - sha256: a13cbe4c93ba756b36e85a5972b5902f89cc3a6cb09e8b65a542eb2e7426487a - md5: 7e164d22d6403d92b73dcacdeb6a5ff0 + url: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.14-py312h275cf98_0.conda + sha256: 20bc64c412b659b387ed12d73ca9138e4487abcfb3f1547b6d4cdb68753035e9 + md5: 0e0aac13d306f0b016f4c85cbfbf87be depends: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 @@ -16246,8 +16335,8 @@ packages: license_family: MIT purls: - pkg:pypi/pywinpty?source=hash-mapping - size: 212342 - timestamp: 1724951397416 + size: 210034 + timestamp: 1729202671199 - kind: conda name: pyyaml version: 6.0.2 @@ -16314,12 +16403,12 @@ packages: - kind: conda name: pyzmq version: 26.2.0 - build: py312h54d5c6a_2 - build_number: 2 + build: py312h1060d5c_3 + build_number: 3 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h54d5c6a_2.conda - sha256: 6c412ab7f2ff2f112f53888913a9505518789a9c6d39ba9ad57d26a26f1c1b96 - md5: de7dc71e825ef8745051e1439935a244 + url: https://conda.anaconda.org/conda-forge/osx-64/pyzmq-26.2.0-py312h1060d5c_3.conda + sha256: 880b10ebbc563164d24adf51d2166ddd54a368627dc546cf89abc3e9c935e23c + md5: fa167f6388357aeff8fd341b7bc9edd6 depends: - __osx >=10.13 - libcxx >=17 @@ -16331,17 +16420,17 @@ packages: license_family: BSD purls: - pkg:pypi/pyzmq?source=hash-mapping - size: 362401 - timestamp: 1725449326748 + size: 362749 + timestamp: 1728642592082 - kind: conda name: pyzmq version: 26.2.0 - build: py312hbf22597_2 - build_number: 2 + build: py312hbf22597_3 + build_number: 3 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda - sha256: 
a2431644cdef4111f7120565090114f52897e687e83c991bd76a3baef8de77c4 - md5: 44f46ddfdd01d242d2fff2d69a0d7cba + url: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_3.conda + sha256: bc303f9b11e04a515f79cd5ad3bfa0e84b9dfec76552626d6263b38789fe6678 + md5: 746ce19f0829ec3e19c93007b1a224d3 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -16354,17 +16443,17 @@ packages: license_family: BSD purls: - pkg:pypi/pyzmq?source=hash-mapping - size: 378667 - timestamp: 1725449078945 + size: 378126 + timestamp: 1728642454632 - kind: conda name: pyzmq version: 26.2.0 - build: py312hd7027bb_2 - build_number: 2 + build: py312hd7027bb_3 + build_number: 3 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_2.conda - sha256: b148a68de6fc13f7d760936f72a240bf49049ded5a55c3b372581a2f1ea83655 - md5: 4b52a5f41750f313d59704d09120a02f + url: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.2.0-py312hd7027bb_3.conda + sha256: 46a645f9482c9ca55716644dae85f6d3cf771b696379d1dd86841ca6007ee409 + md5: 1ff97de0753654c02e5195a710bbf05c depends: - libsodium >=1.0.20,<1.0.21.0a0 - python >=3.12,<3.13.0a0 @@ -16377,8 +16466,8 @@ packages: license_family: BSD purls: - pkg:pypi/pyzmq?source=hash-mapping - size: 360878 - timestamp: 1725449586300 + size: 360217 + timestamp: 1728642895644 - kind: conda name: qhull version: '2020.2' @@ -16431,13 +16520,12 @@ packages: timestamp: 1720814433486 - kind: conda name: qt6-main - version: 6.7.3 - build: h6e8976b_1 - build_number: 1 + version: 6.8.0 + build: h6e8976b_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda - sha256: f5e4cefa82edec73c9bfc99566391463aeb339cfae8446f9b3c7950fefec6555 - md5: f3234422a977b5d400ccf503ad55c5d1 + url: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.8.0-h6e8976b_0.conda + sha256: f21949a55d07f72f910b0256401ae7b666d04810d110236aee86063da7babc51 + md5: 6d1c5d2d904d24c17cbb538a95855a4e depends: - 
__glibc >=2.17,<3.0.a0 - alsa-lib >=1.2.12,<1.3.0a0 @@ -16491,21 +16579,20 @@ packages: - xorg-libxxf86vm >=1.1.5,<2.0a0 - zstd >=1.5.6,<1.6.0a0 constrains: - - qt 6.7.3 + - qt 6.8.0 license: LGPL-3.0-only license_family: LGPL purls: [] - size: 47378301 - timestamp: 1727940486113 + size: 51315820 + timestamp: 1728406028 - kind: conda name: qt6-main - version: 6.7.3 - build: hfb098fa_1 - build_number: 1 + version: 6.8.0 + build: hfb098fa_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.7.3-hfb098fa_1.conda - sha256: c10933396b409f74f05fe7036ddf2b129e219dd3939170c3ebb0fd0790cd14ac - md5: 3dd4b78a610e48def640c3c9acd0c7e7 + url: https://conda.anaconda.org/conda-forge/win-64/qt6-main-6.8.0-hfb098fa_0.conda + sha256: 71603164b962f50f663d7281f6c7c290be451e8cce399d4d91d86cfb156fd1d8 + md5: 053046ca73b71bbcc81c6dc114264d24 depends: - double-conversion >=3.3.0,<3.4.0a0 - harfbuzz >=9.0.0,<10.0a0 @@ -16526,12 +16613,12 @@ packages: - vc14_runtime >=14.29.30139 - zstd >=1.5.6,<1.6.0a0 constrains: - - qt 6.7.3 + - qt 6.8.0 license: LGPL-3.0-only license_family: LGPL purls: [] - size: 88587578 - timestamp: 1727941590323 + size: 93521358 + timestamp: 1728406725577 - kind: conda name: quarto version: 1.5.57 @@ -16594,14 +16681,15 @@ packages: timestamp: 1725910489963 - kind: conda name: quartodoc - version: 0.7.6 + version: 0.8.1 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.7.6-pyhd8ed1ab_0.conda - sha256: aa8b66a33d88ad834956ab4ec5993f0858ecbd243156cc45f6b1ab7443378617 - md5: 02da3ef68978e1d6d61ded2338b2ee8b + url: https://conda.anaconda.org/conda-forge/noarch/quartodoc-0.8.1-pyhd8ed1ab_0.conda + sha256: 95cb8a50720c2d5f87f36f971f6b1158343db45707f0f17e85e67d613a073992 + md5: 0341c90d53a9748db85c53194b448213 depends: + - black - click - griffe >=0.33 - importlib-metadata >=5.1.0 @@ -16619,8 +16707,8 @@ packages: license_family: MIT purls: - pkg:pypi/quartodoc?source=hash-mapping - 
size: 65713 - timestamp: 1724510613002 + size: 68703 + timestamp: 1729005402983 - kind: conda name: rasterio version: 1.4.1 @@ -16740,49 +16828,52 @@ packages: timestamp: 1727684612025 - kind: conda name: re2 - version: 2023.11.01 - build: h2fb0a26_0 + version: 2024.07.02 + build: h2fb0a26_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/re2-2023.11.01-h2fb0a26_0.conda - sha256: e38f014d9905a59a4deefb30f8c848ac1f829664795830d89bd0aea9cf0ee8f4 - md5: 6ec7183fedc2a75b1897cc29ffbd4288 + url: https://conda.anaconda.org/conda-forge/osx-64/re2-2024.07.02-h2fb0a26_1.conda + sha256: 49ec4ed6249efe9cda173745e036137f8de1f0b22edf9b0ca4f9c6409b2b68f9 + md5: aa8ea927cdbdf690efeae3e575716131 depends: - - libre2-11 2023.11.01 hd530cb8_0 + - libre2-11 2024.07.02 hd530cb8_1 license: BSD-3-Clause license_family: BSD purls: [] - size: 26834 - timestamp: 1728466533363 + size: 26864 + timestamp: 1728779054104 - kind: conda name: re2 - version: 2023.11.01 - build: h77b4e00_0 + version: 2024.07.02 + build: h77b4e00_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.11.01-h77b4e00_0.conda - sha256: 48a802f9498d467cf2f26dca8cf9be710934a6d20978bb8d90dabdf261cd4206 - md5: 1130d7820f70a485cbdda10e166d31de + url: https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h77b4e00_1.conda + sha256: c1721cb80f7201652fc9801f49c214c88aee835d957f2376e301bd40a8415742 + md5: 01093ff37c1b5e6bf9f17c0116747d11 depends: - - libre2-11 2023.11.01 hbbce691_0 + - libre2-11 2024.07.02 hbbce691_1 license: BSD-3-Clause license_family: BSD purls: [] - size: 26664 - timestamp: 1728466449788 + size: 26665 + timestamp: 1728778975855 - kind: conda name: re2 - version: 2023.11.01 - build: hd3b24a8_0 + version: 2024.07.02 + build: hd3b24a8_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/re2-2023.11.01-hd3b24a8_0.conda - sha256: 
af6521397c0a502cf8817d7a4d0f25b10090064ca4452b03919d101a5decdf3b - md5: 2724b2bc2ce3c8581212063682c7210e + url: https://conda.anaconda.org/conda-forge/win-64/re2-2024.07.02-hd3b24a8_1.conda + sha256: 5ac1c50d731c323bb52c78113792a71c5f8f060e5767c0a202120a948e0fc85b + md5: b4abdc84c969587219e7e759116a3e8b depends: - - libre2-11 2023.11.01 h4eb7d71_0 + - libre2-11 2024.07.02 h4eb7d71_1 license: BSD-3-Clause license_family: BSD purls: [] - size: 213974 - timestamp: 1728466677561 + size: 214858 + timestamp: 1728779526745 - kind: conda name: readline version: '8.2' @@ -16895,7 +16986,7 @@ packages: timestamp: 1598024297745 - kind: pypi name: ribasim - version: 2024.10.0 + version: 2024.11.0 path: ../Ribasim/python/ribasim sha256: c48692687129085ad19256cbf54c8df9853f6259b31997cc4a282fde86072751 requires_dist: @@ -17135,12 +17226,12 @@ packages: timestamp: 1728067495992 - kind: conda name: s2n - version: 1.5.4 - build: h1380c3d_0 + version: 1.5.5 + build: h3931f03_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.4-h1380c3d_0.conda - sha256: b5145c74e781511ea55dad60dbb45e1053be1543d2577b29f4b091c96f93a65a - md5: 4e63e4713ffc9cddc3d5d435b5853b93 + url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.5-h3931f03_0.conda + sha256: a6fa0afa836f8f26dea0abc180ca2549bb517932d9a88a121e707135d4bcb715 + md5: 334dba9982ab9f5d62033c61698a8683 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 @@ -17148,8 +17239,8 @@ packages: license: Apache-2.0 license_family: Apache purls: [] - size: 352989 - timestamp: 1728019508986 + size: 353081 + timestamp: 1728534228471 - kind: conda name: scikit-learn version: 1.5.2 @@ -17229,11 +17320,12 @@ packages: - kind: conda name: scipy version: 1.14.1 - build: py312h1f4e10d_0 + build: py312h337df96_1 + build_number: 1 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h1f4e10d_0.conda - sha256: 8f70ded1b7b469d61f6f7a580c541538a0275e05a0ca2def60cb95555d06e7e3 - md5: 
075ca2339855d696007b35110b83d958 + url: https://conda.anaconda.org/conda-forge/win-64/scipy-1.14.1-py312h337df96_1.conda + sha256: d0a8b9e849ae53af5c8373d1429464e071fda3ee35accb77775757b330e0d340 + md5: 7d85322084d7262008c49c85d3079c50 depends: - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 @@ -17247,47 +17339,47 @@ packages: - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: BSD-3-Clause - license_family: BSD purls: - pkg:pypi/scipy?source=hash-mapping - size: 16013280 - timestamp: 1724329197087 + size: 16143541 + timestamp: 1729482531384 - kind: conda name: scipy version: 1.14.1 - build: py312h7d485d2_0 + build: py312h62794b6_1 + build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda - sha256: 79903e307183e08b19c7ef607672fd304ed4968b2a7530904147aa79536e70d1 - md5: 7418a22e73008356d9aba99d93dfeeee + url: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda + sha256: d069a64edade554261672d8febf4756aeb56a6cb44bd91844eaa944e5d9f4eb9 + md5: b43233a9e2f62fb94affe5607ea79473 depends: - __glibc >=2.17,<3.0.a0 - libblas >=3.9.0,<4.0a0 - libcblas >=3.9.0,<4.0a0 - - libgcc-ng >=13 - - libgfortran-ng + - libgcc >=13 + - libgfortran - libgfortran5 >=13.3.0 - liblapack >=3.9.0,<4.0a0 - - libstdcxx-ng >=13 + - libstdcxx >=13 - numpy <2.3 - numpy >=1.19,<3 - numpy >=1.23.5 - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause - license_family: BSD purls: - pkg:pypi/scipy?source=hash-mapping - size: 17700161 - timestamp: 1724328333870 + size: 17622722 + timestamp: 1729481826601 - kind: conda name: scipy version: 1.14.1 - build: py312he82a568_0 + build: py312h888eae2_1 + build_number: 1 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312he82a568_0.conda - sha256: 21339aad0646f5c841ded61a2dae6fa46cef86d691098fd6160c5311e0a86454 - md5: dd3c55da62964fcadf27771e1928e67f + url: 
https://conda.anaconda.org/conda-forge/osx-64/scipy-1.14.1-py312h888eae2_1.conda + sha256: 5a28ea91c935513e6c5f64baac5a02ce43d9ba183b98e20127220b207ec96529 + md5: ee7a4ffe9742d2df44caa858b36814b8 depends: - __osx >=10.13 - libblas >=3.9.0,<4.0a0 @@ -17302,11 +17394,10 @@ packages: - python >=3.12,<3.13.0a0 - python_abi 3.12.* *_cp312 license: BSD-3-Clause - license_family: BSD purls: - pkg:pypi/scipy?source=hash-mapping - size: 16322022 - timestamp: 1724328432301 + size: 16032291 + timestamp: 1729481615781 - kind: conda name: send2trash version: 1.8.3 @@ -17926,89 +18017,93 @@ packages: - kind: conda name: tiledb version: 2.26.2 - build: h1b23fdf_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-h1b23fdf_0.conda - sha256: d394b829556f98f98528be3356ac4c5e8c5859f77b09ae6ce32efa68bbe884f2 - md5: af186cb9c0de2d3d00dc6fdffe4a7137 + build: h19c5691_2 + build_number: 2 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h19c5691_2.conda + sha256: 986ef23289e65d93f5a4144ee8a37ebdca7e475a387f9f5ea520d2db4ae59ca7 + md5: b7d27e4f31b2aca4bbb446638c2b2573 depends: - - __osx >=10.13 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - azure-identity-cpp >=1.10.0,<1.10.1.0a0 - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - bzip2 >=1.0.8,<2.0a0 - fmt >=11.0.2,<12.0a0 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 + - libcrc32c >=1.1.2,<1.2.0a0 - libcurl >=8.10.1,<9.0a0 - - libcxx >=17 - - libgoogle-cloud >=2.29.0,<2.30.0a0 - - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 + - libgoogle-cloud >=2.30.0,<2.31.0a0 + - libgoogle-cloud-storage >=2.30.0,<2.31.0a0 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.3.2,<4.0a0 - spdlog >=1.14.1,<1.15.0a0 + - ucrt >=10.0.20348.0 
+ - vc >=14.3,<15 + - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 license: MIT license_family: MIT purls: [] - size: 3974130 - timestamp: 1727662107924 + size: 3123194 + timestamp: 1728671850573 - kind: conda name: tiledb version: 2.26.2 - build: h34a6a78_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/tiledb-2.26.2-h34a6a78_0.conda - sha256: 1bce3c5f3306f7d31ece1992e90fbba1bd3033b2d7e6bc18a459bace2d4d487f - md5: 26a64a1718f8bdc76a18389be0294aa5 + build: hcd2843e_2 + build_number: 2 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hcd2843e_2.conda + sha256: 3e424bc5c93d8c8f194761c4a7febdd097cc085720bfd6b84f223fe05ead80ce + md5: d8bcc88559c96b2e3513fff3dd02b3d9 depends: + - __glibc >=2.17,<3.0.a0 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - azure-identity-cpp >=1.10.0,<1.10.1.0a0 - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - bzip2 >=1.0.8,<2.0a0 - fmt >=11.0.2,<12.0a0 - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - - libcrc32c >=1.1.2,<1.2.0a0 - libcurl >=8.10.1,<9.0a0 - - libgoogle-cloud >=2.29.0,<2.30.0a0 - - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 + - libgcc >=13 + - libgoogle-cloud >=2.30.0,<2.31.0a0 + - libgoogle-cloud-storage >=2.30.0,<2.31.0a0 + - libstdcxx >=13 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 - openssl >=3.3.2,<4.0a0 - spdlog >=1.14.1,<1.15.0a0 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.40.33810 - zstd >=1.5.6,<1.6.0a0 license: MIT license_family: MIT purls: [] - size: 3114129 - timestamp: 1727662671400 + size: 4568795 + timestamp: 1728670892218 - kind: conda name: tiledb version: 2.26.2 - build: hedb9d39_0 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-hedb9d39_0.conda - sha256: 061966f73c60baecda32cb243dbb292033d4f921e776ef2e0a1d1f13efd29d5f - md5: 40304fbbc36f8dc937adf74cdf024925 + build: hcef368d_2 + build_number: 2 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/tiledb-2.26.2-hcef368d_2.conda + sha256: 4956f2eba5bbc793c0783c3f1266cdc319568cf7280124ff47984da314025409 + md5: be0f5bdda031b6c30014d6a0031e215f depends: - - __glibc >=2.17,<3.0.a0 + - __osx >=10.13 - aws-crt-cpp >=0.28.3,<0.28.4.0a0 - aws-sdk-cpp >=1.11.407,<1.11.408.0a0 - - azure-core-cpp >=1.13.0,<1.13.1.0a0 - - azure-identity-cpp >=1.9.0,<1.9.1.0a0 + - azure-core-cpp >=1.14.0,<1.14.1.0a0 + - azure-identity-cpp >=1.10.0,<1.10.1.0a0 - azure-storage-blobs-cpp >=12.13.0,<12.13.1.0a0 - azure-storage-common-cpp >=12.8.0,<12.8.1.0a0 - bzip2 >=1.0.8,<2.0a0 @@ -18016,10 +18111,9 @@ packages: - libabseil * cxx17* - libabseil >=20240722.0,<20240723.0a0 - libcurl >=8.10.1,<9.0a0 - - libgcc >=13 - - libgoogle-cloud >=2.29.0,<2.30.0a0 - - libgoogle-cloud-storage >=2.29.0,<2.30.0a0 - - libstdcxx >=13 + - libcxx >=17 + - libgoogle-cloud >=2.30.0,<2.31.0a0 + - libgoogle-cloud-storage >=2.30.0,<2.31.0a0 - libwebp-base >=1.4.0,<2.0a0 - libzlib >=1.3.1,<2.0a0 - lz4-c >=1.9.3,<1.10.0a0 @@ -18029,8 +18123,8 @@ packages: license: MIT license_family: MIT purls: [] - size: 4572331 - timestamp: 1727662399128 + size: 3975884 + timestamp: 1728671392205 - kind: conda name: tinycss2 version: 1.3.0 @@ -18316,21 +18410,21 @@ packages: timestamp: 1727940306315 - kind: conda name: types-requests - version: 2.32.0.20240914 + version: 2.32.0.20241016 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240914-pyhd8ed1ab_0.conda - sha256: 586def52571620788bc095766f60c9a56b448a492a360e4ab1471ca73bdd21fa - md5: f3cf0cb8f627fa3dff2093a8546074fa + url: 
https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20241016-pyhd8ed1ab_0.conda + sha256: b2c58c36589a7670e4131f96fbe779cb943c0acb01192ae0f0e3954d1257cbd4 + md5: 5569933ebb375e7b561bdf64062c1658 depends: - python >=3.8 - urllib3 >=2 license: Apache-2.0 AND MIT purls: - pkg:pypi/types-requests?source=hash-mapping - size: 26329 - timestamp: 1726293973214 + size: 26313 + timestamp: 1729102626972 - kind: conda name: typing-extensions version: 4.12.2 @@ -18697,13 +18791,13 @@ packages: timestamp: 1728401055788 - kind: conda name: virtualenv - version: 20.26.6 + version: 20.27.0 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda - sha256: 23128da47bc0b42b0fef0d41efc10d8ea1fb8232f0846bc4513eeba866f20d13 - md5: a7aa70aa30c47aeb84672621a85a4ef8 + url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda + sha256: 18bae5ff9f02793ca56d295f0a5f1d4443623ee3be09a6805eb7d4b18245968c + md5: a6ed1227ba6ec37cfc2b25e6512f729f depends: - distlib <1,>=0.3.7 - filelock <4,>=3.12.2 @@ -18713,8 +18807,8 @@ packages: license_family: MIT purls: - pkg:pypi/virtualenv?source=hash-mapping - size: 4875601 - timestamp: 1727513873376 + size: 2952166 + timestamp: 1729243861344 - kind: conda name: vs2015_runtime version: 14.40.33810 @@ -19526,12 +19620,12 @@ packages: - kind: conda name: xorg-libxxf86vm version: 1.1.5 - build: hb9d3cd8_3 - build_number: 3 + build: hb9d3cd8_4 + build_number: 4 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_3.conda - sha256: c84404835e6f7985faa645a333bd17c6259a2b1627177db471010db9308b5d52 - md5: 2159fc3619590b4f62473b6b9631549f + url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda + sha256: 0b8f062a5b4a2c3833267285b7d41b3542f54d2c935c86ca98504c3e5296354c + md5: 7da9007c0582712c4bad4131f89c8372 depends: - __glibc >=2.17,<3.0.a0 - libgcc 
>=13 @@ -19540,8 +19634,8 @@ packages: license: MIT license_family: MIT purls: [] - size: 17943 - timestamp: 1727956927910 + size: 18072 + timestamp: 1728920051869 - kind: conda name: xorg-xorgproto version: '2024.1' diff --git a/scripts/notebooks/hydamo_0_analyse_data_waterboard.ipynb b/scripts/notebooks/hydamo_0_analyse_data_waterboard.ipynb index 2372830..7628456 100644 --- a/scripts/notebooks/hydamo_0_analyse_data_waterboard.ipynb +++ b/scripts/notebooks/hydamo_0_analyse_data_waterboard.ipynb @@ -12,14 +12,15 @@ "from pathlib import Path\n", "\n", "import pandas as pd\n", - "from hydamo.datamodel import HyDAMO\n", "from pandas_xlsx_tables import xlsx_tables_to_dfs\n", "from ribasim_lumping_tools.LHM_data_bewerking_analyse_utils import (\n", " check_ids_hydamo_data,\n", " check_if_object_on_hydroobject,\n", " read_original_data,\n", " translate_data_to_hydamo_format,\n", - ")" + ")\n", + "\n", + "from hydamo.datamodel import HyDAMO" ] }, { diff --git a/scripts/notebooks/ribasim_lumping_tools/run_ribasim_lumping_waterboard.py b/scripts/notebooks/ribasim_lumping_tools/run_ribasim_lumping_waterboard.py index cfcc927..3c96e19 100644 --- a/scripts/notebooks/ribasim_lumping_tools/run_ribasim_lumping_waterboard.py +++ b/scripts/notebooks/ribasim_lumping_tools/run_ribasim_lumping_waterboard.py @@ -11,6 +11,7 @@ from pathlib import Path import pandas as pd + from ribasim_lumping import create_ribasim_lumping_network warnings.simplefilter("ignore") diff --git a/scripts/notebooks/xxxx_combine_waterschap_layers.ipynb b/scripts/notebooks/xxxx_combine_waterschap_layers.ipynb index 12cc87b..8e6ce6c 100644 --- a/scripts/notebooks/xxxx_combine_waterschap_layers.ipynb +++ b/scripts/notebooks/xxxx_combine_waterschap_layers.ipynb @@ -13,6 +13,7 @@ "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import pandas as pd\n", + "\n", "from ribasim_lumping.utils.general_functions import remove_holes_from_polygons" ] }, diff --git 
a/src/peilbeheerst_model/01_parse_crossings.ipynb b/src/peilbeheerst_model/01_parse_crossings.ipynb deleted file mode 100644 index d6c7eb9..0000000 --- a/src/peilbeheerst_model/01_parse_crossings.ipynb +++ /dev/null @@ -1,209 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "import json\n", - "import pathlib\n", - "\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import pandas as pd\n", - "from IPython.core.display import HTML\n", - "from peilbeheerst_model import ParseCrossings" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "with open(\"waterschappen.json\") as f:\n", - " waterschap_data = json.load(f)\n", - "\n", - "print_df = {}\n", - "for waterschap, waterschap_struct in waterschap_data.items():\n", - " for funcname, func_args in waterschap_struct.items():\n", - " if funcname not in print_df:\n", - " print_df[funcname] = []\n", - " print_df[funcname].append(pd.Series(func_args, name=waterschap))\n", - "\n", - "for funcname, df in print_df.items():\n", - " print(HTML(f\"

Function {funcname}:

\"))\n", - " print(pd.DataFrame(df))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "for waterschap, waterschap_struct in waterschap_data.items():\n", - " print(f\"\\n{waterschap}...\")\n", - "\n", - " init_settings, crossing_settings = waterschap_struct.values()\n", - " init_settings[\"logfile\"] = pathlib.Path(init_settings[\"output_path\"]).with_suffix(\"\").with_suffix(\".log\")\n", - "\n", - " if waterschap not in [\"HHNK\"]:\n", - " continue\n", - "\n", - " # if pathlib.Path(init_settings[\"output_path\"]).exists() and \"crossings_hydroobject\" in fiona.listlayers(init_settings[\"output_path\"]):\n", - " # continue\n", - "\n", - " # Crossings class initializeren\n", - " cross = ParseCrossings(**init_settings)\n", - "\n", - " # Crossings bepalen en wegschrijven\n", - " if crossing_settings[\"filterlayer\"] is None:\n", - " df_hydro = cross.find_crossings_with_peilgebieden(\"hydroobject\", **crossing_settings)\n", - " cross.write_crossings(df_hydro)\n", - " else:\n", - " df_hydro, df_dsf, df_hydro_dsf = cross.find_crossings_with_peilgebieden(\"hydroobject\", **crossing_settings)\n", - " cross.write_crossings(df_hydro, crossing_settings[\"filterlayer\"], df_dsf, df_hydro_dsf)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "plt.close(\"all\")\n", - "fig1, ax1 = plt.subplots(figsize=(12, 7.4), dpi=100)\n", - "fig2, ax2 = plt.subplots(figsize=(12, 7.4), dpi=100)\n", - "\n", - "for ax in [ax1, ax2]:\n", - " ax.spines[\"top\"].set_visible(False)\n", - " ax.spines[\"right\"].set_visible(False)\n", - " ax.spines[\"left\"].set_visible(False)\n", - " ax.spines[\"bottom\"].set_color(\"#dddddd\")\n", - " ax.tick_params(bottom=False, left=False)\n", - " ax.yaxis.grid(True, color=\"#eeeeee\")\n", 
- " ax.xaxis.grid(False)\n", - "\n", - "waterschappen = []\n", - "network_results = {\"Basins\": [], \"Edges\": [], \"Peilgebieden\": []}\n", - "# reduction_results = {\"initial\": [], \"in_use\": [], \"agg_links_in_use\": [], \"agg_areas_in_use\": []}\n", - "reduction_results = {\"in_use\": [], \"agg_links_in_use\": [], \"agg_areas_in_use\": []}\n", - "for waterschap, waterschap_struct in waterschap_data.items():\n", - " init_settings, crossing_settings = waterschap_struct.values()\n", - " df = gpd.read_file(init_settings[\"output_path\"], layer=\"crossings_hydroobject_filtered\")\n", - "\n", - " basins, edges, peilgebieden = None, None, None\n", - " init_cross, cross_inuse, cross_agglinks, cross_aggareas = None, None, None, None\n", - " try:\n", - " sub_df = df[df.agg_areas_in_use].copy()\n", - " all_nodes = np.hstack([sub_df.agg_area_from.to_numpy(), sub_df.agg_area_to.to_numpy()])\n", - " basins = len(np.unique(all_nodes[~pd.isna(all_nodes)]))\n", - " edges = len(sub_df) * 2\n", - " all_peilgebieden = np.hstack([sub_df.peilgebied_from.to_numpy(), sub_df.peilgebied_to.to_numpy()])\n", - " peilgebieden = len(np.unique(all_peilgebieden[~pd.isna(all_peilgebieden)]))\n", - "\n", - " init_cross = len(df)\n", - " cross_inuse = len(df[df.in_use])\n", - " cross_agglinks = len(df[df.agg_links_in_use])\n", - " cross_aggareas = len(df[df.agg_areas_in_use])\n", - " except Exception as e:\n", - " print(f\"{waterschap=}, {e=}\")\n", - "\n", - " # reduction_results[\"initial\"].append(init_cross)\n", - " reduction_results[\"in_use\"].append(cross_inuse)\n", - " reduction_results[\"agg_links_in_use\"].append(cross_agglinks)\n", - " reduction_results[\"agg_areas_in_use\"].append(cross_aggareas)\n", - "\n", - " network_results[\"Basins\"].append(basins)\n", - " network_results[\"Edges\"].append(edges)\n", - " network_results[\"Peilgebieden\"].append(peilgebieden)\n", - " waterschappen.append(waterschap)\n", - "\n", - "colours = [\"#0C3B5D\", \"#3EC1CD\", \"#EF3A4C\", 
\"#FCB94D\"]\n", - "\n", - "x1 = np.arange(len(waterschappen))\n", - "width = 1 / (1 + len(network_results))\n", - "multiplier = 0\n", - "for multiplier, (attribute, measurement) in enumerate(network_results.items()):\n", - " offset = width * multiplier\n", - " rects = ax1.bar(x1 + offset, measurement, width, label=attribute, color=colours[multiplier])\n", - " # ax1.bar_label(rects, padding=3)\n", - "ax1.set_axisbelow(True)\n", - "ax1.set_xticks(x1 + width, waterschappen, rotation=45)\n", - "ax1.legend(loc=\"upper left\", ncols=len(network_results))\n", - "\n", - "\n", - "x2 = np.arange(len(waterschappen))\n", - "width = 1 / (1 + len(reduction_results))\n", - "for multiplier, (attribute, measurement) in enumerate(reduction_results.items()):\n", - " offset = width * multiplier\n", - " rects = ax2.bar(x2 + offset, measurement, width, label=attribute, color=colours[multiplier])\n", - " # ax2.bar_label(rects, padding=3)\n", - "ax2.set_axisbelow(True)\n", - "ax2.set_xticks(x2 + width, waterschappen, rotation=45)\n", - "ax2.legend(loc=\"upper left\", ncols=len(reduction_results))\n", - "\n", - "fig1.tight_layout()\n", - "fig2.tight_layout()\n", - "\n", - "fig1.savefig(\"network_results.jpeg\", bbox_inches=\"tight\")\n", - "fig2.savefig(\"reduction_results.jpeg\", bbox_inches=\"tight\")\n", - "\n", - "print(pd.DataFrame(reduction_results, index=waterschappen))\n", - "print(pd.DataFrame(network_results, index=waterschappen))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": 
"python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/01_parse_crossings.py b/src/peilbeheerst_model/01_parse_crossings.py new file mode 100644 index 0000000..e0ff7b2 --- /dev/null +++ b/src/peilbeheerst_model/01_parse_crossings.py @@ -0,0 +1,128 @@ +# %% +import pathlib + +import geopandas as gpd +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd + +from peilbeheerst_model import ParseCrossings, waterschap_data + +# %% + +print_df = {} +for waterschap, waterschap_struct in waterschap_data.items(): + for funcname, func_args in waterschap_struct.items(): + if funcname not in print_df: + print_df[funcname] = [] + print_df[funcname].append(pd.Series(func_args, name=waterschap)) + +for funcname, df in print_df.items(): + print(f"Function {funcname}:") + print(pd.DataFrame(df)) + +# %% +for waterschap, waterschap_struct in waterschap_data.items(): + print(f"\n{waterschap}...") + + init_settings, crossing_settings = waterschap_struct.values() + init_settings["logfile"] = pathlib.Path(init_settings["output_path"]).with_suffix("").with_suffix(".log") + + if waterschap not in ["HHNK"]: + continue + + # if pathlib.Path(init_settings["output_path"]).exists() and "crossings_hydroobject" in fiona.listlayers(init_settings["output_path"]): + # continue + + # Crossings class initializeren + cross = ParseCrossings(**init_settings) + + # Crossings bepalen en wegschrijven + if crossing_settings["filterlayer"] is None: + df_hydro = cross.find_crossings_with_peilgebieden("hydroobject", **crossing_settings) + cross.write_crossings(df_hydro) + else: + df_hydro, df_dsf, df_hydro_dsf = cross.find_crossings_with_peilgebieden("hydroobject", **crossing_settings) + cross.write_crossings(df_hydro, crossing_settings["filterlayer"], df_dsf, df_hydro_dsf) + +# %% +plt.close("all") +fig1, ax1 = plt.subplots(figsize=(12, 7.4), dpi=100) +fig2, ax2 = 
plt.subplots(figsize=(12, 7.4), dpi=100) + +for ax in [ax1, ax2]: + ax.spines["top"].set_visible(False) + ax.spines["right"].set_visible(False) + ax.spines["left"].set_visible(False) + ax.spines["bottom"].set_color("#dddddd") + ax.tick_params(bottom=False, left=False) + ax.yaxis.grid(True, color="#eeeeee") + ax.xaxis.grid(False) + +waterschappen = [] +network_results = {"Basins": [], "Edges": [], "Peilgebieden": []} +# reduction_results = {"initial": [], "in_use": [], "agg_links_in_use": [], "agg_areas_in_use": []} +reduction_results = {"in_use": [], "agg_links_in_use": [], "agg_areas_in_use": []} +for waterschap, waterschap_struct in waterschap_data.items(): + init_settings, crossing_settings = waterschap_struct.values() + df = gpd.read_file(init_settings["output_path"], layer="crossings_hydroobject_filtered") + + basins, edges, peilgebieden = None, None, None + init_cross, cross_inuse, cross_agglinks, cross_aggareas = None, None, None, None + try: + sub_df = df[df.agg_areas_in_use].copy() + all_nodes = np.hstack([sub_df.agg_area_from.to_numpy(), sub_df.agg_area_to.to_numpy()]) + basins = len(np.unique(all_nodes[~pd.isna(all_nodes)])) + edges = len(sub_df) * 2 + all_peilgebieden = np.hstack([sub_df.peilgebied_from.to_numpy(), sub_df.peilgebied_to.to_numpy()]) + peilgebieden = len(np.unique(all_peilgebieden[~pd.isna(all_peilgebieden)])) + + init_cross = len(df) + cross_inuse = len(df[df.in_use]) + cross_agglinks = len(df[df.agg_links_in_use]) + cross_aggareas = len(df[df.agg_areas_in_use]) + except Exception as e: + print(f"{waterschap=}, {e=}") + + # reduction_results["initial"].append(init_cross) + reduction_results["in_use"].append(cross_inuse) + reduction_results["agg_links_in_use"].append(cross_agglinks) + reduction_results["agg_areas_in_use"].append(cross_aggareas) + + network_results["Basins"].append(basins) + network_results["Edges"].append(edges) + network_results["Peilgebieden"].append(peilgebieden) + waterschappen.append(waterschap) + +colours = 
["#0C3B5D", "#3EC1CD", "#EF3A4C", "#FCB94D"] + +x1 = np.arange(len(waterschappen)) +width = 1 / (1 + len(network_results)) +multiplier = 0 +for multiplier, (attribute, measurement) in enumerate(network_results.items()): + offset = width * multiplier + rects = ax1.bar(x1 + offset, measurement, width, label=attribute, color=colours[multiplier]) + # ax1.bar_label(rects, padding=3) +ax1.set_axisbelow(True) +ax1.set_xticks(x1 + width, waterschappen, rotation=45) +ax1.legend(loc="upper left", ncols=len(network_results)) + + +x2 = np.arange(len(waterschappen)) +width = 1 / (1 + len(reduction_results)) +for multiplier, (attribute, measurement) in enumerate(reduction_results.items()): + offset = width * multiplier + rects = ax2.bar(x2 + offset, measurement, width, label=attribute, color=colours[multiplier]) + # ax2.bar_label(rects, padding=3) +ax2.set_axisbelow(True) +ax2.set_xticks(x2 + width, waterschappen, rotation=45) +ax2.legend(loc="upper left", ncols=len(reduction_results)) + +fig1.tight_layout() +fig2.tight_layout() + +fig1.savefig("network_results.jpeg", bbox_inches="tight") +fig2.savefig("reduction_results.jpeg", bbox_inches="tight") + +print(pd.DataFrame(reduction_results, index=waterschappen)) +print(pd.DataFrame(network_results, index=waterschappen)) diff --git a/src/peilbeheerst_model/01_test_parse_crossings.ipynb b/src/peilbeheerst_model/01_test_parse_crossings.ipynb index 55b8c3d..5ac586f 100644 --- a/src/peilbeheerst_model/01_test_parse_crossings.ipynb +++ b/src/peilbeheerst_model/01_test_parse_crossings.ipynb @@ -18,6 +18,7 @@ "import tqdm.auto as tqdm\n", "from IPython.core.display import HTML\n", "from matplotlib.patches import Polygon\n", + "\n", "from peilbeheerst_model import ParseCrossings" ] }, diff --git a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb index 5f1fe7c..574aa95 100644 --- a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb +++ 
b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb @@ -7,13 +7,13 @@ "metadata": {}, "outputs": [], "source": [ - "import json\n", "import pathlib\n", "\n", "import geopandas as gpd\n", "import pandas as pd\n", "from IPython.core.display import HTML\n", - "from peilbeheerst_model import ParseCrossings" + "\n", + "from peilbeheerst_model import ParseCrossings, waterschap_data" ] }, { @@ -23,9 +23,6 @@ "metadata": {}, "outputs": [], "source": [ - "with open(\"waterschappen.json\") as f:\n", - " waterschap_data = json.load(f)\n", - "\n", "print_df = {}\n", "for waterschap, waterschap_struct in waterschap_data.items():\n", " for funcname, func_args in waterschap_struct.items():\n", diff --git a/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb b/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb index b396128..3a3c832 100644 --- a/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb +++ b/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb @@ -13,9 +13,10 @@ "\n", "import load_ribasim # noqa: F401\n", "import pandas as pd\n", - "import peilbeheerst_model.ribasim_parametrization as ribasim_param\n", "import ribasim\n", "import ribasim.nodes\n", + "\n", + "import peilbeheerst_model.ribasim_parametrization as ribasim_param\n", "from peilbeheerst_model.add_storage_basins import AddStorageBasins\n", "from peilbeheerst_model.controle_output import *\n", "from peilbeheerst_model.ribasim_feedback_processor import RibasimFeedbackProcessor\n", diff --git a/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb b/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb index bdefec6..8e8a4c9 100644 --- a/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb +++ b/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, 
MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -49,10 +51,8 @@ "source": [ "waterschap = \"Hollandse Delta\"\n", "\n", - "\n", "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "# Load crossings file\n", diff --git a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb index 664b77e..33901f7 100644 --- a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb +++ b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -52,7 +54,7 @@ "\n", "# Define crossings file path\n", "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "# Load crossings file\n", diff --git a/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb b/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb index 201cdb6..0bc9400 100644 --- 
a/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb +++ b/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -51,7 +53,7 @@ "\n", "# Define crossings file path\n", "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "# Load crossings file\n", diff --git a/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb b/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb index a2229d2..b8b102b 100644 --- a/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb +++ b/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -51,7 +53,7 @@ "\n", "# Define crossings file path\n", "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "# Load crossings file\n", diff --git 
a/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb b/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb index b4a2b57..2fe53cc 100644 --- a/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb +++ b/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -51,7 +53,7 @@ "\n", "# Define crossings file path\n", "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "# Load crossings file\n", diff --git a/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb b/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb index d3ea281..1539e39 100644 --- a/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb +++ b/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -51,7 +53,7 @@ "\n", "# Define crossings file path\n", "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + 
"data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "print(data_path)\n", "# Load crossings file\n", diff --git a/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb b/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb index 0919ec8..41b9297 100644 --- a/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb +++ b/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -51,7 +53,7 @@ "\n", "# Define crossings file path\n", "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "# Load crossings file\n", diff --git a/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb b/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb index 42395a6..a43b06c 100644 --- a/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb +++ b/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb @@ -29,7 +29,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -51,7 +53,7 @@ "\n", "# Define crossings file path\n", "path2json = 
\"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "\n", diff --git a/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb b/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb index fc3cd0f..358c521 100644 --- a/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb +++ b/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb @@ -30,7 +30,9 @@ "import tqdm.auto as tqdm\n", "from shapely.geometry import LineString, MultiLineString, Point\n", "from shapely.ops import split\n", - "from shapely.wkt import dumps" + "from shapely.wkt import dumps\n", + "\n", + "from peilbeheerst_model import waterschap_data" ] }, { @@ -52,7 +54,7 @@ "\n", "# Define crossings file path\n", "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap][\"output_path\"]\n", + "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", "\n", "# Load crossings file\n", diff --git a/src/peilbeheerst_model/peilbeheerst_model/__init__.py b/src/peilbeheerst_model/peilbeheerst_model/__init__.py index a370ec7..58608ef 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/__init__.py +++ b/src/peilbeheerst_model/peilbeheerst_model/__init__.py @@ -1,4 +1,6 @@ __version__ = "0.1.0" -# ruff: noqa from peilbeheerst_model.parse_crossings import ParseCrossings +from peilbeheerst_model.waterschappen import waterschap_data + +__all__ = ["ParseCrossings", "waterschap_data"] diff --git 
a/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py b/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py index 3081c62..2753b81 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py +++ b/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py @@ -7,10 +7,11 @@ import pandas as pd import ribasim from bokeh.palettes import Category10 -from ribasim_nl import CloudStorage from shapely.geometry import LineString, Point from shapely.wkt import loads +from ribasim_nl import CloudStorage + class CrossingsToRibasim: """Create a Ribasim network from crossings diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.ipynb deleted file mode 100644 index 675b020..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.ipynb +++ /dev/null @@ -1,453 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Amstel Gooi en Vecht" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS Notes:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "# from itertools import combinations\n", - "\n", - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Amstel Gooi en Vecht" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - 
"outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"AmstelGooienVecht\"\n", - "waterschap2 = \"AGV\"\n", - "\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap2}.gpkg\"\n", - "# \"Z:\\projects\\4750_20\\Data_postprocessed\\Waterschappen\\AmstelGooienVecht\\AGV.gpkg\"\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER//projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "\n", - "# Buffer boundaries\n", - "buffer_path = r\"/DATAFOLDER//projects/4750_30/Data_overig/HWS/hws_buffer_agv.gpkg\"\n", - "\n", - "# # Buffer RWHS\n", - "# rhws_path = f\"/DATAFOLDER//projects/4750_30/Data_overig/HWS/agv_rhws_buffer.gpkg\"\n", - "\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "### Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "AVG = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "AVG[\"peilgebied\"] = AVG[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)\n", - "gdf_buffer = 
gdf_buffer.to_crs(\"EPSG:28992\")\n", - "gdf_buffer = gdf_buffer.dissolve()\n", - "\n", - "# # Load rhws\n", - "# gdf_rhws = gpd.read_file(rhws_path)\n", - "# gdf_rhws = gdf_rhws.to_crs('EPSG:28992')\n", - "# gdf_rhws = gdf_rhws.dissolve()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "AVG[\"peilgebied\"].globalid.is_unique" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"HH Amstel, Gooi en Vecht\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")\n", - "\n", - "# # Use waterschap boudnaries to clip HWS layer\n", - "# gdf_rhws = gpd.overlay(gdf_grens, gdf_rhws, how='intersection')" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "## Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. 
Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(AVG[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(AVG[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 500\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "for index, row in AVG[\"peilgebied\"].iterrows():\n", - " # if row.code == \"Oosterpark\" or row.code == \"Vechtboezem\":\n", - " if \"Oosterpark\" in row.code or \"Vechtboezem\" in row.code or \"Stadsboezem Amsterdam\" in row.code:\n", - " print(\"true\")\n", - "\n", - " peilgebieden_cat.append(1)\n", - " else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - "# Add new column and drop old HWS_BZM column\n", - "AVG[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "## Add rhws to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# gdf_rhws['globalid'] = 'dummy_globalid_rhws_' + gdf_rhws.index.astype(str)\n", - "# gdf_rhws['code'] = 'dummy_code_nhws_' + gdf_rhws.index.astype(str)\n", - "# gdf_rhws['nen3610id'] = 'dummy_nen3610id_rhws_' + gdf_rhws.index.astype(str)\n", - "# gdf_rhws['peilgebied_cat'] = 1\n", - "\n", - "# gdf_rhws = gdf_rhws[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# AVG['peilgebied'] = pd.concat([gdf_rhws, AVG['peilgebied']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_hws = pd.DataFrame()\n", - "# streefpeil_hws['waterhoogte'] = [np.nan] * len(gdf_rhws)\n", - "# streefpeil_hws['globalid'] = 'dummy_globalid_rhws_' + gdf_rhws.index.astype(str)\n", - "# streefpeil_hws['geometry'] = [None]* len(gdf_rhws)\n", - "\n", - "# AVG['streefpeil'] = pd.concat([streefpeil_hws, AVG['streefpeil']])\n", - "# AVG['streefpeil'] = gpd.GeoDataFrame(AVG['streefpeil'])" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "## Add nhws to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "AVG[\"peilgebied\"] = pd.concat([gdf_hws, AVG[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"18", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "AVG[\"streefpeil\"] = pd.concat([streefpeil_hws, AVG[\"streefpeil\"]])\n", - "AVG[\"streefpeil\"] = gpd.GeoDataFrame(AVG[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "### Create buffer polygon between NHWS and peilgebied/RHWS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0])\n", - "# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist()))\n", - "# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(AVG['peilgebied'].geometry.tolist()))\n", - "\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon = buffer_polygon.set_geometry(0)\n", - "# buffer_polygon = buffer_polygon.dissolve()\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon.set_geometry('geometry')\n", - "# buffer_polygon = buffer_polygon.set_crs('EPSG:28992')" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "### Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + 
buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# AVG['peilgebied'] = pd.concat([buffer_polygon, AVG['peilgebied']])\n", - "# AVG['peilgebied'] = gpd.GeoDataFrame(AVG['peilgebied'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# AVG['streefpeil'] = pd.concat([streefpeil_buffer, AVG['streefpeil']])\n", - "# AVG['streefpeil'] = gpd.GeoDataFrame(AVG['streefpeil'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " AVG[\"peilgebied\"] = AVG[\"peilgebied\"].loc[AVG[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "## Store output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "for key in AVG.keys():\n", - " print(key)\n", - " AVG[str(key)].to_file(f\"{output_folder}/{waterschap2}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "AVG[\"peilgebied\"][\"peilgebied_cat\"].unique()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": 
[], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py new file mode 100644 index 0000000..ea285ed --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py @@ -0,0 +1,232 @@ +# Amstel Gooi en Vecht + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS Notes: + +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from general_functions import read_gpkg_layers + +from ribasim_nl import CloudStorage + +# %% + +remove_cat_2 = True + +waterschap = "AmstelGooienVecht" +waterschap2 = "AGV" + +# %% +cloud = CloudStorage() +cloud.download_verwerkt(waterschap) + +# cloud.download_basisgegevens() +# cloud.download_aangeleverd("Rijkswaterstaat") + +# %% +verwerkt_dir = cloud.joinpath(waterschap, "verwerkt") +data_path = verwerkt_dir / "preprocessed.gpkg" + +# Waterschaps boundaries +grens_path = cloud.joinpath("Basisgegevens/RWS_waterschaps_grenzen/waterschap.gpkg") + +# Hoofdwatersysteem boundaries +hws_path = cloud.joinpath("Rijkswaterstaat/verwerkt/krw_basins_vlakken.gpkg") + +# Buffer boundaries +buffer_path = cloud.joinpath("Rijkswaterstaat/verwerkt/hws_buffer_agv.gpkg") + +# Buffer RWHS +rhws_path = 
cloud.joinpath("Rijkswaterstaat/verwerkt/agv_rhws_buffer.gpkg") + + +# %% Load Files +# Load HHNK files +AVG = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +AVG["peilgebied"] = AVG["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) +gdf_buffer = gdf_buffer.to_crs("EPSG:28992") +gdf_buffer = gdf_buffer.dissolve() + +# Load rhws +# gdf_rhws = gpd.read_file(rhws_path) +# gdf_rhws = gdf_rhws.to_crs('EPSG:28992') +# gdf_rhws = gdf_rhws.dissolve() + +# %% +AVG["peilgebied"].globalid.is_unique + +# Select waterschap boundaries and clip hws layer + +# %% +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[gdf_grens["naam"].str.contains("Amstel, Gooi en Vecht")] +assert len(gdf_grens) == 1 + +# Use waterschap boundaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + +# Use waterschap boundaries to clip HWS layer +# gdf_rhws = gpd.overlay(gdf_grens, gdf_rhws, how='intersection') + +# Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. 
Filter + +# %% +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(AVG["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(AVG["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 500 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + +# %% +# Add occurence to geodataframe +peilgebieden_cat = [] + +for index, row in AVG["peilgebied"].iterrows(): + # if row.code == "Oosterpark" or row.code == "Vechtboezem": + if "Oosterpark" in row.code or "Vechtboezem" in row.code or "Stadsboezem Amsterdam" in row.code: + print("true") + + peilgebieden_cat.append(1) + else: + peilgebieden_cat.append(0) + +# Add new column and drop old HWS_BZM column +AVG["peilgebied"]["peilgebied_cat"] = peilgebieden_cat + +# %% Add rhws to ['peilgebied','streefpeil'] +# update peilgebied dict key +# gdf_rhws['globalid'] = 'dummy_globalid_rhws_' + gdf_rhws.index.astype(str) +# gdf_rhws['code'] = 'dummy_code_nhws_' + gdf_rhws.index.astype(str) +# gdf_rhws['nen3610id'] = 'dummy_nen3610id_rhws_' + gdf_rhws.index.astype(str) +# gdf_rhws['peilgebied_cat'] = 1 + +# gdf_rhws = gdf_rhws[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# AVG['peilgebied'] = pd.concat([gdf_rhws, AVG['peilgebied']]) + +# %% +# # Create boezem streefpeil layer +# streefpeil_hws = pd.DataFrame() +# streefpeil_hws['waterhoogte'] = [np.nan] * 
len(gdf_rhws) +# streefpeil_hws['globalid'] = 'dummy_globalid_rhws_' + gdf_rhws.index.astype(str) +# streefpeil_hws['geometry'] = [None]* len(gdf_rhws) + +# AVG['streefpeil'] = pd.concat([streefpeil_hws, AVG['streefpeil']]) +# AVG['streefpeil'] = gpd.GeoDataFrame(AVG['streefpeil']) + +# Add nhws to ['peilgebied','streefpeil'] + +# %% +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +AVG["peilgebied"] = pd.concat([gdf_hws, AVG["peilgebied"]]) + +# %% +# Create boezem streefpeil layer +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +AVG["streefpeil"] = pd.concat([streefpeil_hws, AVG["streefpeil"]]) +AVG["streefpeil"] = gpd.GeoDataFrame(AVG["streefpeil"]) + +# %% Create buffer polygon between NHWS and peilgebied/RHWS +# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0]) +# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist())) +# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(AVG['peilgebied'].geometry.tolist())) + +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon = buffer_polygon.set_geometry(0) +# buffer_polygon = buffer_polygon.dissolve() +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon.set_geometry('geometry') +# buffer_polygon = buffer_polygon.set_crs('EPSG:28992') + + +# %% Add buffer to ['peilgebied','streefpeil'] + +# update peilgebied dict key +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# 
buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# AVG['peilgebied'] = pd.concat([buffer_polygon, AVG['peilgebied']]) +# AVG['peilgebied'] = gpd.GeoDataFrame(AVG['peilgebied']) + +# %% +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# streefpeil_buffer['geometry'] = [None] + + +# AVG['streefpeil'] = pd.concat([streefpeil_buffer, AVG['streefpeil']]) +# AVG['streefpeil'] = gpd.GeoDataFrame(AVG['streefpeil']) + +# %% +if remove_cat_2: + AVG["peilgebied"] = AVG["peilgebied"].loc[AVG["peilgebied"].peilgebied_cat != 2] + +# %% Store output + +output_gpkg_path = verwerkt_dir / "postprocessed.gpkg" + +for key in AVG.keys(): + print(key) + AVG[str(key)].to_file(output_gpkg_path, layer=str(key), driver="GPKG") + +cloud.upload_verwerkt(output_gpkg_path) +# %% +AVG["peilgebied"]["peilgebied_cat"].unique() diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb index 8c71596..8d72ecd 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb @@ -11,8 +11,9 @@ "import numpy as np\n", "import pandas as pd\n", "from general_functions import show_layers_and_columns, store_data\n", - "from ribasim_nl import CloudStorage\n", - "from 
shapely import wkt" + "from shapely import wkt\n", + "\n", + "from ribasim_nl import CloudStorage" ] }, { diff --git a/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py b/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py index 71b1048..b81a9b8 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py +++ b/src/peilbeheerst_model/peilbeheerst_model/ribasim_parametrization.py @@ -11,9 +11,10 @@ import pandas as pd import ribasim import tqdm.auto as tqdm -from ribasim_nl import CloudStorage from shapely.geometry import LineString +from ribasim_nl import CloudStorage + def get_current_max_nodeid(ribasim_model): # max_ids = [1] diff --git a/src/peilbeheerst_model/waterschappen.json b/src/peilbeheerst_model/peilbeheerst_model/waterschappen.py similarity index 76% rename from src/peilbeheerst_model/waterschappen.json rename to src/peilbeheerst_model/peilbeheerst_model/waterschappen.py index 7056032..c82bc7f 100644 --- a/src/peilbeheerst_model/waterschappen.json +++ b/src/peilbeheerst_model/peilbeheerst_model/waterschappen.py @@ -1,173 +1,142 @@ -{ - "HHNK": { +waterschap_data = { + "AmstelGooienVecht": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/HHNK/Noorderkwartier.gpkg", - "output_path": "../../../../Data_crossings/HHNK/hhnk_crossings_v26.gpkg", - "search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/AmstelGooienVecht/AGV.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": 
"../../../../Data_crossings/AmstelGooienVecht/agv_crossings_v05.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, "Delfland": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Delfland/Delfland.gpkg", - "output_path": "../../../../Data_crossings/Delfland/delfland_crossings_v08.gpkg", - "search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Delfland/Delfland.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/Delfland/delfland_crossings_v08.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, - "Hollandse Delta": { + "HHNK": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Hollandse_Delta/HD.gpkg", - "output_path": "../../../../Data_crossings/Hollandse_Delta/hd_crossings_v06.gpkg", - "search_radius_structure": 300, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/HHNK/Noorderkwartier.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + 
"krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/HHNK/hhnk_crossings_v26.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, - "AmstelGooienVecht": { + "HHSK": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/AmstelGooienVecht/AGV.gpkg", - "output_path": "../../../../Data_crossings/AmstelGooienVecht/agv_crossings_v05.gpkg", - "search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/HHSK/HHSK.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 - + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/HHSK/hhsk_crossings_v04.gpkg", + "search_radius_structure": 300, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, - "HHSK": { + "Hollandse Delta": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/HHSK/HHSK.gpkg", - "output_path": "../../../../Data_crossings/HHSK/hhsk_crossings_v04.gpkg", - "search_radius_structure": 300, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": 
"../../../../Data_postprocessed/Waterschappen/Hollandse_Delta/HD.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/Hollandse_Delta/hd_crossings_v06.gpkg", + "search_radius_structure": 300, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, "Rijnland": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Rijnland/Rijnland.gpkg", - "output_path": "../../../../Data_crossings/Rijnland/rijnland_crossings_v04.gpkg", - "search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Rijnland/Rijnland.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/Rijnland/rijnland_crossings_v04.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, "Scheldestromen": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Scheldestromen/Scheldestromen.gpkg", - "output_path": "../../../../Data_crossings/Scheldestromen/scheldestromen_crossings_v02.gpkg", - "search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": 
"../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Scheldestromen/Scheldestromen.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/Scheldestromen/scheldestromen_crossings_v02.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, - "Wetterskip": { + "WSRL": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip.gpkg", - "output_path": "../../../../Data_crossings/Wetterskip/wetterskip_crossings_v06.gpkg", - "search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/WSRL/WSRL.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/WSRL/wsrl_crossings_v06.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, - "WSRL": { + "Wetterskip": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/WSRL/WSRL.gpkg", - "output_path": "../../../../Data_crossings/WSRL/wsrl_crossings_v06.gpkg", - 
"search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/Wetterskip/wetterskip_crossings_v06.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } }, "Zuiderzeeland": { + "find_crossings_with_peilgebieden": {"filterlayer": "duikersifonhevel", "group_stacked": True}, "init": { - "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Zuiderzeeland/Zuiderzeeland.gpkg", - "output_path": "../../../../Data_crossings/Zuiderzeeland/zzl_crossings_v05.gpkg", - "search_radius_structure": 60, - "agg_peilgebieden_layer": "aggregation_area", "agg_peilgebieden_column": "code", - "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "agg_peilgebieden_layer": "aggregation_area", + "gpkg_path": "../../../../Data_postprocessed/Waterschappen/Zuiderzeeland/Zuiderzeeland.gpkg", "krw_column_id": "owmident", "krw_column_name": "owmnaam", - "krw_min_overlap": 0.025 + "krw_min_overlap": 0.025, + "krw_path": "../../../../Data_overig/KRW/KRW_lichamen_per_waterschap.gpkg", + "output_path": "../../../../Data_crossings/Zuiderzeeland/zzl_crossings_v05.gpkg", + "search_radius_structure": 60, }, - "find_crossings_with_peilgebieden": { - "filterlayer": "duikersifonhevel", - "group_stacked": true - } - } + }, } diff --git a/src/ribasim_nl/reset_index.py b/src/ribasim_nl/reset_index.py index cfb78b3..7f6a07b 100644 --- a/src/ribasim_nl/reset_index.py +++ 
b/src/ribasim_nl/reset_index.py @@ -1,6 +1,7 @@ # %% import pandas as pd from ribasim import Model + from ribasim_nl.case_conversions import pascal_to_snake_case diff --git a/src/ribasim_nl/tests/test_cloud.py b/src/ribasim_nl/tests/test_cloud.py index ed9b24f..3a6fc4b 100644 --- a/src/ribasim_nl/tests/test_cloud.py +++ b/src/ribasim_nl/tests/test_cloud.py @@ -1,4 +1,5 @@ import pytest + from ribasim_nl import CloudStorage from ribasim_nl.cloud import WATER_AUTHORITIES # noqa: F401 diff --git a/src/ribasim_nl/tests/test_network.py b/src/ribasim_nl/tests/test_network.py index 3288c4a..3bbbe15 100644 --- a/src/ribasim_nl/tests/test_network.py +++ b/src/ribasim_nl/tests/test_network.py @@ -3,9 +3,10 @@ import geopandas as gpd import pytest -from ribasim_nl import Network from shapely.geometry import LineString +from ribasim_nl import Network + @pytest.fixture def osm_lines_gpkg(): diff --git a/src/ribasim_nl/tests/test_tables.py b/src/ribasim_nl/tests/test_tables.py index 93bc783..a7145aa 100644 --- a/src/ribasim_nl/tests/test_tables.py +++ b/src/ribasim_nl/tests/test_tables.py @@ -1,4 +1,5 @@ import pandas as pd + from ribasim_nl.tables import average_width, cumulative_area, manning_profile diff --git a/stash/5_model_netwerk_old.py b/stash/5_model_netwerk_old.py index 8089ddb..2b77342 100644 --- a/stash/5_model_netwerk_old.py +++ b/stash/5_model_netwerk_old.py @@ -2,6 +2,7 @@ import geopandas as gpd import pandas as pd import ribasim + from ribasim_nl import CloudStorage, Network, reset_index from ribasim_nl.rating_curve import read_rating_curve from ribasim_nl.verdeelsleutels import ( diff --git a/stash/5b_upgrade_to_main.py b/stash/5b_upgrade_to_main.py index d16da7c..b5a43c5 100644 --- a/stash/5b_upgrade_to_main.py +++ b/stash/5b_upgrade_to_main.py @@ -7,6 +7,7 @@ import tomli_w from ribasim import Node from ribasim.nodes import linear_resistance + from ribasim_nl import CloudStorage diff --git a/stash/6_model_sturing copy.py b/stash/6_model_sturing copy.py index 
1180146..0edbfb0 100644 --- a/stash/6_model_sturing copy.py +++ b/stash/6_model_sturing copy.py @@ -4,6 +4,7 @@ import pandas as pd import ribasim from ribasim import nodes + from ribasim_nl import CloudStorage, discrete_control from ribasim_nl.case_conversions import pascal_to_snake_case from ribasim_nl.model import add_control_node_to_network diff --git a/web_app/main.py b/web_app/main.py index 7bbfd44..7763669 100644 --- a/web_app/main.py +++ b/web_app/main.py @@ -3,6 +3,7 @@ import ribasim from bokeh.io import curdoc + from bokeh_helpers.widgets.map_figure_widget import MapFigure toml_file = next((i for i in sys.argv if i.lower().endswith(".toml")), None) From 99e906eed1e833dbc56e513cfade31556b8bfd11 Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Mon, 21 Oct 2024 14:12:54 +0200 Subject: [PATCH 17/23] Convert peilbeheerst notebooks to script (#171) This runs `pixi run jupyter nbconvert --to script path/to/notebook.ipynb` for all notebooks in `src\peilbeheerst_model\`. I find it much easier to refactor code when it is all in Python. But there are also some VSCode notebook bugs plaguing me, for instance scrambling imports on autoformat when I save them. I did some minor edits afterwards, e.g. removing `# In[]` comments and some ipython magic that is no longer needed. 
--- .../01_test_parse_crossings.ipynb | 600 --------- .../01_test_parse_crossings.py | 519 +++++++ .../01b_ad_krw_to_peilgebieden.ipynb | 101 -- .../01b_ad_krw_to_peilgebieden.py | 44 + .../02_crossings_to_ribasim_notebook.ipynb | 1194 ----------------- .../02_crossings_to_ribasim_notebook.py | 1015 ++++++++++++++ src/peilbeheerst_model/03_test_outlets.ipynb | 365 ----- src/peilbeheerst_model/03_test_outlets.py | 161 +++ .../AmstelGooienVecht_parametrize.ipynb | 572 -------- .../AmstelGooienVecht_parametrize.py | 272 ++++ .../01_shortest_path_Hollandse_Delta.ipynb | 584 -------- .../01_shortest_path_Hollandse_Delta.py | 458 +++++++ .../Shortest_path/02_shortest_path_HHSK.ipynb | 642 --------- .../Shortest_path/02_shortest_path_HHSK.py | 457 +++++++ .../Shortest_path/03_shortest_path_HHNK.ipynb | 576 -------- .../Shortest_path/03_shortest_path_HHNK.py | 457 +++++++ .../04_shortest_path_Delfland.ipynb | 569 -------- .../04_shortest_path_Delfland.py | 456 +++++++ .../05_shortest_path_Scheldestromen.ipynb | 611 --------- .../05_shortest_path_Scheldestromen.py | 456 +++++++ .../06_shortest_path_Zuiderzeeland.ipynb | 581 -------- .../06_shortest_path_Zuiderzeeland.py | 461 +++++++ .../Shortest_path/07_shortest_path_WSRL.ipynb | 591 -------- .../Shortest_path/07_shortest_path_WSRL.py | 456 +++++++ .../08_shortest_path_Wetterskip.ipynb | 592 -------- .../08_shortest_path_Wetterskip.py | 457 +++++++ .../09_shortest_path_Rijnland.ipynb | 594 -------- .../09_shortest_path_Rijnland.py | 460 +++++++ .../Shortest_path/10_shortest_path_AGV.ipynb | 909 ------------- .../Shortest_path/10_shortest_path_AGV.py | 701 ++++++++++ src/peilbeheerst_model/compute_voronoi.ipynb | 366 ----- src/peilbeheerst_model/compute_voronoi.py | 267 ++++ .../postprocess_data/post-process_WSRL.ipynb | 482 ------- .../postprocess_data/post-process_WSRL.py | 230 ++++ .../post-process_delfland.ipynb | 403 ------ .../postprocess_data/post-process_delfland.py | 204 +++ .../post-process_rijnland.ipynb | 545 
-------- .../postprocess_data/post-process_rijnland.py | 294 ++++ .../post-process_wetterskip.ipynb | 536 -------- .../post-process_wetterskip.py | 285 ++++ .../post-process_zuiderzeeland.ipynb | 440 ------ .../post-process_zuiderzeeland.py | 215 +++ .../postprocess_data/post-processing_HD.ipynb | 447 ------ .../postprocess_data/post-processing_HD.py | 230 ++++ .../post-processing_HHNK.ipynb | 456 ------- .../postprocess_data/post-processing_HHNK.py | 225 ++++ .../post-processing_HHSK.ipynb | 508 ------- .../postprocess_data/post-processing_HHSK.py | 244 ++++ .../post-processing_scheldestromen.ipynb | 459 ------- .../post-processing_scheldestromen.py | 202 +++ .../preprocess_data/AmstelGooienVecht.ipynb | 345 ----- .../preprocess_data/AmstelGooienVecht.py | 188 +++ .../preprocess_data/Delfland.ipynb | 275 ---- .../preprocess_data/Delfland.py | 141 ++ .../preprocess_data/HHNK.ipynb | 374 ------ .../preprocess_data/HHNK.py | 190 +++ .../preprocess_data/HHSK.ipynb | 560 -------- .../preprocess_data/HHSK.py | 262 ++++ .../preprocess_data/Hollandse_Delta.ipynb | 298 ---- .../preprocess_data/Hollandse_Delta.py | 161 +++ .../preprocess_data/Rijnland.ipynb | 481 ------- .../preprocess_data/Rijnland.py | 171 +++ .../preprocess_data/Rivierenland.ipynb | 461 ------- .../preprocess_data/Rivierenland.py | 240 ++++ .../preprocess_data/Scheldestromen.ipynb | 346 ----- .../preprocess_data/Scheldestromen.py | 147 ++ .../preprocess_data/Wetterskip.ipynb | 409 ------ .../preprocess_data/Wetterskip.py | 212 +++ .../preprocess_data/Zuiderzeeland.ipynb | 334 ----- .../preprocess_data/Zuiderzeeland.py | 174 +++ 70 files changed, 11112 insertions(+), 17606 deletions(-) delete mode 100644 src/peilbeheerst_model/01_test_parse_crossings.ipynb create mode 100644 src/peilbeheerst_model/01_test_parse_crossings.py delete mode 100644 src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb create mode 100644 src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py delete mode 100644 
src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb create mode 100644 src/peilbeheerst_model/02_crossings_to_ribasim_notebook.py delete mode 100644 src/peilbeheerst_model/03_test_outlets.ipynb create mode 100644 src/peilbeheerst_model/03_test_outlets.py delete mode 100644 src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb create mode 100644 src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.py delete mode 100644 src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py delete mode 100644 src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py delete mode 100644 src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py delete mode 100644 src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py delete mode 100644 src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py delete mode 100644 src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py delete mode 100644 src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py delete mode 100644 src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py delete mode 100644 src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb create mode 100644 
src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py delete mode 100644 src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.ipynb create mode 100644 src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py delete mode 100644 src/peilbeheerst_model/compute_voronoi.ipynb create mode 100644 src/peilbeheerst_model/compute_voronoi.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.ipynb create mode 100644 
src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py delete mode 100644 
src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.ipynb create mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py diff --git a/src/peilbeheerst_model/01_test_parse_crossings.ipynb b/src/peilbeheerst_model/01_test_parse_crossings.ipynb deleted file mode 100644 index 5ac586f..0000000 --- a/src/peilbeheerst_model/01_test_parse_crossings.ipynb +++ /dev/null @@ -1,600 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "import pathlib\n", - "import warnings\n", - "\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import shapely.geometry\n", - "import shapely.validation\n", - "import tqdm.auto as tqdm\n", - "from IPython.core.display import HTML\n", - "from matplotlib.patches import Polygon\n", - "\n", - "from peilbeheerst_model import ParseCrossings" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "polygons = {\n", - " \"perfect fit\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ]\n", - " },\n", - " \"perfect fit star 1\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (1, 0), (2, 1.5), (1.5, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(3, 0), (4, 0), (4, 2), (2.5, 2), (2, 1.5)]),\n", - " shapely.geometry.Polygon([(1, 0), (3, 0), (2, 1.5)]),\n", - " shapely.geometry.Polygon([(1.5, 2), (2.5, 2), (2, 1.5)]),\n", - " ]\n", - " },\n", - " \"perfect fit star 2\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (1, 0), (2, 0.5), (1.5, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(3, 0), (4, 0), (4, 2), (2.5, 2), (2, 0.5)]),\n", - " shapely.geometry.Polygon([(1, 0), (3, 0), (2, 0.5)]),\n", - " 
shapely.geometry.Polygon([(1.5, 2), (2.5, 2), (2, 0.5)]),\n", - " ]\n", - " },\n", - " \"perfect fit on edge\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 1.5), (0, 1.5)]),\n", - " shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ]\n", - " },\n", - " \"narrow gap\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2.1, 0), (4, 0), (4, 2), (2.1, 2)]),\n", - " ]\n", - " },\n", - " \"wide gap\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(3, 0), (4, 0), (4, 2), (3, 2)]),\n", - " ]\n", - " },\n", - " \"narrow overlap\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(1.9, 0), (4, 0), (4, 2), (1.9, 2)]),\n", - " ]\n", - " },\n", - " \"wide overlap\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(1, 0), (4, 0), (4, 2), (1, 2)]),\n", - " ]\n", - " },\n", - " \"single cross wide\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " ]\n", - " },\n", - " \"single cross narrow\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (3.4, 0), (3.4, 2), (0, 2)]),\n", - " ]\n", - " },\n", - " \"single cross at edge\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (3.5, 0), (3.5, 2), (0, 2)]),\n", - " ]\n", - " },\n", - " \"single cross on edge\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 1.5), (0, 1.5)]),\n", - " ]\n", - " },\n", - " \"perfect fit with complete overlap\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " 
shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ]\n", - " },\n", - " \"single cross wide with complete overlap\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " ]\n", - " },\n", - " \"single cross at edge with complete overlap\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (3.5, 0), (3.5, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(0, 0), (3.5, 0), (3.5, 2), (0, 2)]),\n", - " ]\n", - " },\n", - " \"polygon within polygon 1\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (3.0, 0), (3.0, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(0.1, 0.1), (2.8, 0.1), (2.8, 1.9), (0.1, 1.9)]),\n", - " ]\n", - " },\n", - " \"polygon within polygon 2\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (3.4, 0), (3.4, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(0.1, 0.1), (2.8, 0.1), (2.8, 1.9), (0.1, 1.9)]),\n", - " ]\n", - " },\n", - " \"polygon within polygon 3\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (4.0, 0), (4.0, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(0.1, 0.1), (2.8, 0.1), (2.8, 1.9), (0.1, 1.9)]),\n", - " ]\n", - " },\n", - " \"polygon butterfly 1a\": {\n", - " \"peilgebieden\": [\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (4.0, 0.0), (4.0, 2.0), (2.0, 0.5), (0.0, 2.0)])\n", - " ),\n", - " ]\n", - " },\n", - " \"polygon butterfly 1b\": {\n", - " \"peilgebieden\": [\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (4.0, 0.0), (4.0, 2.0), (2.0, 0.5), (0.0, 2.0)])\n", - " ),\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (4.0, 0.0)]),\n", - " ]\n", - " },\n", - " \"polygon butterfly 2a\": {\n", - " \"peilgebieden\": [\n", - " shapely.validation.make_valid(\n", - " 
shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (4.0, 0.0), (4.0, 2.0), (2.0, 0.4), (0.0, 2.0)])\n", - " ),\n", - " ]\n", - " },\n", - " \"polygon butterfly 2b\": {\n", - " \"peilgebieden\": [\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (4.0, 0.0), (4.0, 2.0), (2.0, 0.4), (0.0, 2.0)])\n", - " ),\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (4.0, 0.0)]),\n", - " ]\n", - " },\n", - " \"polygon butterfly 2c\": {\n", - " \"peilgebieden\": [\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.6), (4.0, 0.0), (4.0, 2.0), (2.0, 0.6), (0.0, 2.0)])\n", - " ),\n", - " ]\n", - " },\n", - " \"polygon butterfly 2d\": {\n", - " \"peilgebieden\": [\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.6), (4.0, 0.0), (4.0, 2.0), (2.0, 0.6), (0.0, 2.0)])\n", - " ),\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.6), (4.0, 0.0)]),\n", - " ]\n", - " },\n", - " \"polygon butterfly 2e\": {\n", - " \"peilgebieden\": [\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.55), (4.0, 0.0), (4.0, 2.0), (2.0, 0.55), (0.0, 2.0)])\n", - " ),\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.55), (4.0, 0.0)]),\n", - " ]\n", - " },\n", - " \"polygon butterfly 3a\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (0.0, 2.0)]),\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (4.0, 0.0), (2.0, 0.5), (4.0, 2.0), (0.0, 2.0), (2.0, 0.5)])\n", - " ),\n", - " ]\n", - " },\n", - " \"polygon butterfly 3b\": {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (0.0, 2.0)]),\n", - " shapely.validation.make_valid(\n", - " shapely.geometry.Polygon([(0.0, 0.0), (4.0, 0.0), (2.0, 0.4), (4.0, 2.0), (0.0, 2.0), (2.0, 0.4)])\n", - " ),\n", - " ]\n", - " },\n", - "}\n", - "\n", - "linelist = [\n", - " 
shapely.geometry.LineString([(0.5, 0.5), (3.5, 0.5)]),\n", - " shapely.geometry.LineString([(0.5, 0.7), (2.0, 0.7)]),\n", - " shapely.geometry.LineString([(2.0, 0.7), (3.5, 0.7)]),\n", - " shapely.geometry.LineString([(0.5, 0.9), (2.0, 0.9)]),\n", - " shapely.geometry.LineString([(3.5, 0.9), (2.0, 0.9)]),\n", - " shapely.geometry.LineString([(3.8, 0.9), (3.5, 0.9)]),\n", - " shapely.geometry.LineString([(3.5, 1.5), (0.5, 1.5)]),\n", - "]\n", - "\n", - "filterlist = [\n", - " shapely.geometry.LineString([(1.0, 0.7), (2.0, 0.7)]),\n", - " shapely.geometry.LineString([(2.0, 0.7), (3.0, 0.7)]),\n", - " shapely.geometry.LineString([(1.0, 0.9), (3.0, 0.9)]),\n", - "]\n", - "\n", - "\n", - "nofilter = polygons.copy()\n", - "withfilter = polygons.copy()\n", - "for testname, options in nofilter.items():\n", - " options[\"hydroobjecten\"] = linelist.copy()\n", - " nofilter[testname] = options.copy()\n", - "\n", - " options[\"duikersifonhevel\"] = filterlist.copy()\n", - " withfilter[testname] = options.copy()\n", - "\n", - "\n", - "nofilter[\"driehoek 1a\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(0.5, 0.5), (2, 1)]),\n", - " shapely.geometry.LineString([(0.5, 1.5), (2, 1)]),\n", - " shapely.geometry.LineString([(2, 1), (3.5, 0.5)]),\n", - " ],\n", - "}\n", - "nofilter[\"driehoek 1b\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(0.5, 0.5), (2.1, 1)]),\n", - " shapely.geometry.LineString([(0.5, 1.5), (2.1, 1)]),\n", - " shapely.geometry.LineString([(2.1, 1), (3.5, 0.5)]),\n", - " ],\n", - "}\n", - "nofilter[\"driehoek 1c\"] = {\n", - " \"peilgebieden\": [\n", - " 
shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(0.5, 0.5), (1.9, 1)]),\n", - " shapely.geometry.LineString([(0.5, 1.5), (1.9, 1)]),\n", - " shapely.geometry.LineString([(1.9, 1), (3.5, 0.5)]),\n", - " ],\n", - "}\n", - "nofilter[\"driehoek 2\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(3.5, 1.5), (2, 1)]),\n", - " shapely.geometry.LineString([(3.5, 0.5), (2, 1)]),\n", - " shapely.geometry.LineString([(2, 1), (0.5, 0.5)]),\n", - " ],\n", - "}\n", - "nofilter[\"driehoek 3\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(3.5, 1.5), (2, 1)]),\n", - " shapely.geometry.LineString([(3.5, 0.5), (2, 1)]),\n", - " shapely.geometry.LineString([(0.5, 0.5), (2, 1)]),\n", - " ],\n", - "}\n", - "nofilter[\"volgorde groep 1\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]),\n", - " shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(0.5, 0.5), (2.1, 0.5)]),\n", - " shapely.geometry.LineString([(2.1, 0.5), (3.5, 0.5)]),\n", - " ],\n", - "}\n", - "nofilter[\"volgorde groep 2\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]),\n", - " shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]),\n", 
- " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(2.1, 0.5), (0.5, 0.5)]),\n", - " shapely.geometry.LineString([(3.5, 0.5), (2.1, 0.5)]),\n", - " ],\n", - "}\n", - "nofilter[\"volgorde groep 3\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]),\n", - " shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(0.5, 0.5), (2.1, 0.5)]),\n", - " shapely.geometry.LineString([(3.5, 0.5), (2.1, 0.5)]),\n", - " ],\n", - "}\n", - "nofilter[\"volgorde groep 4\"] = {\n", - " \"peilgebieden\": [\n", - " shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]),\n", - " shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]),\n", - " shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]),\n", - " ],\n", - " \"hydroobjecten\": [\n", - " shapely.geometry.LineString([(2.1, 0.5), (0.5, 0.5)]),\n", - " shapely.geometry.LineString([(2.1, 0.5), (3.5, 0.5)]),\n", - " ],\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "testdir = pathlib.Path(\"tests/data\")\n", - "if not testdir.exists():\n", - " testdir.mkdir()\n", - "\n", - "for testlist in tqdm.tqdm([nofilter, withfilter]):\n", - " for test_name, options in testlist.items():\n", - " polyid = [f\"poly_{i+1}\" for i in range(len(options[\"peilgebieden\"]))]\n", - " polywl = [float(i + 1) for i in range(len(options[\"peilgebieden\"]))]\n", - " # df_peil = gpd.GeoDataFrame(dict(globalid=polyid, geometry=options[\"peilgebieden\"]), crs=\"epsg:28992\")\n", - " # df_streef = gpd.GeoDataFrame(dict(globalid=polyid, waterhoogte=polywl, geometry=len(options[\"peilgebieden\"]) * [None]), crs=\"epsg:28992\")\n", - " # lineid = [f\"line_{i+1}\" for i in range(len(options[\"hydroobjecten\"]))]\n", - " # df_hydro = 
gpd.GeoDataFrame(dict(globalid=lineid, geometry=options[\"hydroobjecten\"]), crs=\"epsg:28992\")\n", - "\n", - " df_peil = gpd.GeoDataFrame({\"globalid\": polyid, \"geometry\": options[\"peilgebieden\"]}, crs=\"epsg:28992\")\n", - " df_streef = gpd.GeoDataFrame(\n", - " {\"globalid\": polyid, \"waterhoogte\": polywl, \"geometry\": len(options[\"peilgebieden\"]) * [None]},\n", - " crs=\"epsg:28992\",\n", - " )\n", - "\n", - " lineid = [f\"line_{i+1}\" for i in range(len(options[\"hydroobjecten\"]))]\n", - " df_hydro = gpd.GeoDataFrame({\"globalid\": lineid, \"geometry\": options[\"hydroobjecten\"]}, crs=\"epsg:28992\")\n", - "\n", - " if \"duikersifonhevel\" not in options:\n", - " # Empty filter\n", - " gpkg_path1 = testdir.joinpath(f\"nofilter_{test_name}.gpkg\")\n", - " df_hydro.to_file(gpkg_path1, layer=\"hydroobject\")\n", - " df_peil.to_file(gpkg_path1, layer=\"peilgebied\")\n", - " df_streef.to_file(gpkg_path1, layer=\"streefpeil\")\n", - " with warnings.catch_warnings():\n", - " warnings.simplefilter(action=\"ignore\", category=UserWarning)\n", - " gpd.GeoDataFrame(columns=[\"globalid\", \"geometry\"]).to_file(gpkg_path1, layer=\"stuw\")\n", - " gpd.GeoDataFrame(columns=[\"globalid\", \"geometry\"]).to_file(gpkg_path1, layer=\"gemaal\")\n", - " gpd.GeoDataFrame(columns=[\"globalid\", \"geometry\"]).to_file(gpkg_path1, layer=\"duikersifonhevel\")\n", - " else:\n", - " # With filter\n", - " gpkg_path2 = testdir.joinpath(f\"withfilter_{test_name}.gpkg\")\n", - " df_hydro.to_file(gpkg_path2, layer=\"hydroobject\")\n", - " df_peil.to_file(gpkg_path2, layer=\"peilgebied\")\n", - " df_streef.to_file(gpkg_path2, layer=\"streefpeil\")\n", - " polyfl = [f\"dsh_{i+1}\" for i in range(len(options[\"duikersifonhevel\"]))]\n", - " # df_filter = gpd.GeoDataFrame(dict(globalid=polyfl, geometry=options[\"duikersifonhevel\"]), crs=\"epsg:28992\")\n", - " df_filter = gpd.GeoDataFrame(\n", - " {\"globalid\": polyfl, \"geometry\": options[\"duikersifonhevel\"]}, 
crs=\"epsg:28992\"\n", - " )\n", - "\n", - " df_filter.to_file(gpkg_path2, layer=\"duikersifonhevel\")\n", - " with warnings.catch_warnings():\n", - " warnings.simplefilter(action=\"ignore\", category=UserWarning)\n", - " gpd.GeoDataFrame(columns=[\"globalid\", \"geometry\"]).to_file(gpkg_path2, layer=\"stuw\")\n", - " gpd.GeoDataFrame(columns=[\"globalid\", \"geometry\"]).to_file(gpkg_path2, layer=\"gemaal\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "def make_plot(df_peil, df_hydro, df_streef, df_filter, df_crossings):\n", - " plt.close(\"all\")\n", - " fig, ax = plt.subplots(figsize=(8, 5), dpi=100)\n", - "\n", - " # old_len = len(df_peil)\n", - " dfp = df_peil.copy().explode(index_parts=True)\n", - " dfs = df_streef.set_index(\"globalid\", inplace=False)\n", - " for i, row in enumerate(dfp.itertuples()):\n", - " coords = row.geometry.exterior.coords\n", - " # if old_len == len(dfp):\n", - " xtext = row.geometry.centroid.x - 0.5\n", - " ytext = row.geometry.centroid.y\n", - " # else:\n", - " # xtext = coords[0][0] + 0.05\n", - " # ytext = coords[0][1]\n", - " if (i % 2) == 0:\n", - " ytext += 0.05\n", - " else:\n", - " ytext -= 0.05\n", - "\n", - " ax.text(xtext, ytext, f\"{row.globalid}, wl={dfs.waterhoogte.at[row.globalid]}m\", alpha=0.5)\n", - " ax.add_patch(Polygon(coords, alpha=0.5, lw=1, facecolor=\"powderblue\", edgecolor=\"skyblue\"))\n", - "\n", - " dfh = df_hydro.explode(index_parts=True)\n", - " for row in dfh.itertuples():\n", - " coords = np.array(row.geometry.coords)\n", - " x, y = coords[:, 0], coords[:, 1]\n", - " offset = row.geometry.interpolate(0.1, normalized=True)\n", - " ax.arrow(\n", - " x[0],\n", - " y[0],\n", - " offset.x - x[0],\n", - " offset.y - y[0],\n", - " shape=\"full\",\n", - " lw=0,\n", - " length_includes_head=True,\n", - " head_width=0.05,\n", - " color=\"steelblue\",\n", - " )\n", - " ax.text(offset.x, offset.y + 0.05, row.globalid)\n", - " 
ax.plot(x, y, marker=\".\", markersize=5, lw=1, color=\"steelblue\")\n", - "\n", - " for row in df_crossings.itertuples():\n", - " if row.crossing_type == \"-10\":\n", - " ax.plot(row.geometry.x, row.geometry.y, marker=\"s\", markersize=5, color=\"olivedrab\")\n", - " elif row.crossing_type == \"00\":\n", - " ax.plot(row.geometry.x, row.geometry.y, marker=\"s\", markersize=5, color=\"indianred\")\n", - " else:\n", - " print(f\"{row.crossing_type=} not implemented\")\n", - " ax.plot(row.geometry.x, row.geometry.y, marker=\"s\", markersize=5, color=\"yellow\")\n", - "\n", - " dff = df_filter.explode(index_parts=True)\n", - " for row in dff.itertuples():\n", - " coords = np.array(row.geometry.coords)\n", - " x, y = coords[:, 0], coords[:, 1]\n", - " ax.plot(x, y, marker=\".\", markersize=5, lw=2, alpha=0.3, color=\"purple\")\n", - "\n", - " ax.set_xlim([-0.1, 4.1])\n", - " ax.set_ylim([-0.1, 2.1])\n", - " ax.set_aspect(\"equal\")\n", - " fig.tight_layout()\n", - " plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "for i, gpkg_path in enumerate(sorted(testdir.glob(\"nofilter_*.gpkg\"))):\n", - " if gpkg_path.is_file() and gpkg_path.suffix == \".gpkg\":\n", - " # if \"butterfly 3b\" not in gpkg_path.stem and \"polygon within polygon 1\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"perfect fit on edge\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"polygon within polygon 1\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"narrow gap\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"driehoek\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"nofilter_narrow gap\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"volgorde groep\" not in gpkg_path.stem and \"nofilter_polygon butterfly 2e\" not in gpkg_path.stem and \"nofilter_perfect fit star 2\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"nofilter_polygon butterfly 
2e\" not in gpkg_path.stem and \"nofilter_perfect fit star 2\" not in gpkg_path.stem:\n", - " # continue\n", - "\n", - " display(HTML(f\"

Test {i+1:02d}: {gpkg_path.stem}

\"))\n", - " cross = ParseCrossings(gpkg_path, disable_progress=True, show_log=True)\n", - " df_crossings = cross.find_crossings_with_peilgebieden(\n", - " \"hydroobject\", group_stacked=True, filterlayer=None, agg_links=False\n", - " )\n", - " df_crossings_valid = df_crossings[df_crossings.in_use].copy()\n", - "\n", - " test_output = df_crossings_valid.copy()\n", - " test_output[\"geom_x\"] = np.round(test_output.geometry.x, 8)\n", - " test_output[\"geom_y\"] = np.round(test_output.geometry.y, 8)\n", - " test_output = test_output.drop(columns=\"geometry\", inplace=False)\n", - "\n", - " # Make static test output\n", - " test_output.to_csv(testdir.joinpath(f\"output_{gpkg_path.stem}.csv\"), index=False)\n", - "\n", - " display(df_crossings)\n", - " display(df_crossings_valid)\n", - " make_plot(\n", - " cross.df_gpkg[\"peilgebied\"],\n", - " cross.df_gpkg[\"hydroobject\"],\n", - " cross.df_gpkg[\"streefpeil\"],\n", - " cross.df_gpkg[\"duikersifonhevel\"],\n", - " df_crossings_valid,\n", - " )\n", - " display(HTML(\"
\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "for i, gpkg_path in enumerate(sorted(testdir.glob(\"withfilter_*.gpkg\"))):\n", - " if gpkg_path.is_file() and gpkg_path.suffix == \".gpkg\":\n", - " # if \"withfilter_polygon butterfly 1a\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"withfilter_narrow gap\" not in gpkg_path.stem:\n", - " # continue\n", - " # if \"scheldestromen\" not in gpkg_path.stem:\n", - " # continue\n", - "\n", - " display(HTML(f\"

Test {i+1:02d}: {gpkg_path.stem}

\"))\n", - "\n", - " cross = ParseCrossings(gpkg_path, disable_progress=True, show_log=True)\n", - " _, df_filter, df_crossings = cross.find_crossings_with_peilgebieden(\n", - " \"hydroobject\", group_stacked=True, filterlayer=\"duikersifonhevel\", agg_links=False\n", - " )\n", - " df_crossings_valid = df_crossings[df_crossings.in_use].copy()\n", - "\n", - " test_output = df_crossings_valid.copy()\n", - " test_output[\"geom_x\"] = np.round(test_output.geometry.x, 8)\n", - " test_output[\"geom_y\"] = np.round(test_output.geometry.y, 8)\n", - " test_output = test_output.drop(columns=\"geometry\", inplace=False)\n", - "\n", - " # Make static test output\n", - " test_output.to_csv(testdir.joinpath(f\"output_{gpkg_path.stem}.csv\"), index=False)\n", - "\n", - " display(df_crossings)\n", - " display(df_filter)\n", - " display(df_crossings_valid)\n", - " if \"scheldestromen\" not in gpkg_path.stem:\n", - " make_plot(\n", - " cross.df_gpkg[\"peilgebied\"],\n", - " cross.df_gpkg[\"hydroobject\"],\n", - " cross.df_gpkg[\"streefpeil\"],\n", - " cross.df_gpkg[\"duikersifonhevel\"],\n", - " df_crossings_valid,\n", - " )\n", - " display(HTML(\"
\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "default", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.7" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/01_test_parse_crossings.py b/src/peilbeheerst_model/01_test_parse_crossings.py new file mode 100644 index 0000000..aff2054 --- /dev/null +++ b/src/peilbeheerst_model/01_test_parse_crossings.py @@ -0,0 +1,519 @@ +import pathlib +import warnings + +import geopandas as gpd +import matplotlib.pyplot as plt +import numpy as np +import shapely.geometry +import shapely.validation +import tqdm.auto as tqdm +from matplotlib.patches import Polygon + +from peilbeheerst_model import ParseCrossings + +polygons = { + "perfect fit": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ] + }, + "perfect fit star 1": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (1, 0), (2, 1.5), (1.5, 2), (0, 2)]), + shapely.geometry.Polygon([(3, 0), (4, 0), (4, 2), (2.5, 2), (2, 1.5)]), + shapely.geometry.Polygon([(1, 0), (3, 0), (2, 1.5)]), + shapely.geometry.Polygon([(1.5, 2), (2.5, 2), (2, 1.5)]), + ] + }, + "perfect fit star 2": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (1, 0), (2, 0.5), (1.5, 2), (0, 2)]), + shapely.geometry.Polygon([(3, 0), (4, 0), (4, 2), (2.5, 2), (2, 0.5)]), + shapely.geometry.Polygon([(1, 0), (3, 0), (2, 0.5)]), + shapely.geometry.Polygon([(1.5, 2), (2.5, 2), (2, 0.5)]), + ] + }, + "perfect fit on edge": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), 
(2, 1.5), (0, 1.5)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ] + }, + "narrow gap": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2.1, 0), (4, 0), (4, 2), (2.1, 2)]), + ] + }, + "wide gap": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(3, 0), (4, 0), (4, 2), (3, 2)]), + ] + }, + "narrow overlap": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(1.9, 0), (4, 0), (4, 2), (1.9, 2)]), + ] + }, + "wide overlap": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(1, 0), (4, 0), (4, 2), (1, 2)]), + ] + }, + "single cross wide": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + ] + }, + "single cross narrow": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (3.4, 0), (3.4, 2), (0, 2)]), + ] + }, + "single cross at edge": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (3.5, 0), (3.5, 2), (0, 2)]), + ] + }, + "single cross on edge": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 1.5), (0, 1.5)]), + ] + }, + "perfect fit with complete overlap": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ] + }, + "single cross wide with complete overlap": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + ] + }, + "single cross at edge with complete overlap": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (3.5, 0), (3.5, 2), (0, 2)]), + shapely.geometry.Polygon([(0, 0), (3.5, 0), (3.5, 2), (0, 2)]), + ] + }, + "polygon within polygon 1": { + "peilgebieden": [ + 
shapely.geometry.Polygon([(0, 0), (3.0, 0), (3.0, 2), (0, 2)]), + shapely.geometry.Polygon([(0.1, 0.1), (2.8, 0.1), (2.8, 1.9), (0.1, 1.9)]), + ] + }, + "polygon within polygon 2": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (3.4, 0), (3.4, 2), (0, 2)]), + shapely.geometry.Polygon([(0.1, 0.1), (2.8, 0.1), (2.8, 1.9), (0.1, 1.9)]), + ] + }, + "polygon within polygon 3": { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (4.0, 0), (4.0, 2), (0, 2)]), + shapely.geometry.Polygon([(0.1, 0.1), (2.8, 0.1), (2.8, 1.9), (0.1, 1.9)]), + ] + }, + "polygon butterfly 1a": { + "peilgebieden": [ + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (4.0, 0.0), (4.0, 2.0), (2.0, 0.5), (0.0, 2.0)]) + ), + ] + }, + "polygon butterfly 1b": { + "peilgebieden": [ + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (4.0, 0.0), (4.0, 2.0), (2.0, 0.5), (0.0, 2.0)]) + ), + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (4.0, 0.0)]), + ] + }, + "polygon butterfly 2a": { + "peilgebieden": [ + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (4.0, 0.0), (4.0, 2.0), (2.0, 0.4), (0.0, 2.0)]) + ), + ] + }, + "polygon butterfly 2b": { + "peilgebieden": [ + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (4.0, 0.0), (4.0, 2.0), (2.0, 0.4), (0.0, 2.0)]) + ), + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (4.0, 0.0)]), + ] + }, + "polygon butterfly 2c": { + "peilgebieden": [ + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.6), (4.0, 0.0), (4.0, 2.0), (2.0, 0.6), (0.0, 2.0)]) + ), + ] + }, + "polygon butterfly 2d": { + "peilgebieden": [ + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.6), (4.0, 0.0), (4.0, 2.0), (2.0, 0.6), (0.0, 2.0)]) + ), + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.6), (4.0, 0.0)]), + ] + }, + "polygon butterfly 2e": { + "peilgebieden": [ + 
shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.55), (4.0, 0.0), (4.0, 2.0), (2.0, 0.55), (0.0, 2.0)]) + ), + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.55), (4.0, 0.0)]), + ] + }, + "polygon butterfly 3a": { + "peilgebieden": [ + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.5), (0.0, 2.0)]), + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (4.0, 0.0), (2.0, 0.5), (4.0, 2.0), (0.0, 2.0), (2.0, 0.5)]) + ), + ] + }, + "polygon butterfly 3b": { + "peilgebieden": [ + shapely.geometry.Polygon([(0.0, 0.0), (2.0, 0.4), (0.0, 2.0)]), + shapely.validation.make_valid( + shapely.geometry.Polygon([(0.0, 0.0), (4.0, 0.0), (2.0, 0.4), (4.0, 2.0), (0.0, 2.0), (2.0, 0.4)]) + ), + ] + }, +} + +linelist = [ + shapely.geometry.LineString([(0.5, 0.5), (3.5, 0.5)]), + shapely.geometry.LineString([(0.5, 0.7), (2.0, 0.7)]), + shapely.geometry.LineString([(2.0, 0.7), (3.5, 0.7)]), + shapely.geometry.LineString([(0.5, 0.9), (2.0, 0.9)]), + shapely.geometry.LineString([(3.5, 0.9), (2.0, 0.9)]), + shapely.geometry.LineString([(3.8, 0.9), (3.5, 0.9)]), + shapely.geometry.LineString([(3.5, 1.5), (0.5, 1.5)]), +] + +filterlist = [ + shapely.geometry.LineString([(1.0, 0.7), (2.0, 0.7)]), + shapely.geometry.LineString([(2.0, 0.7), (3.0, 0.7)]), + shapely.geometry.LineString([(1.0, 0.9), (3.0, 0.9)]), +] + + +nofilter = polygons.copy() +withfilter = polygons.copy() +for testname, options in nofilter.items(): + options["hydroobjecten"] = linelist.copy() + nofilter[testname] = options.copy() + + options["duikersifonhevel"] = filterlist.copy() + withfilter[testname] = options.copy() + + +nofilter["driehoek 1a"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(0.5, 0.5), (2, 1)]), + shapely.geometry.LineString([(0.5, 1.5), (2, 1)]), + shapely.geometry.LineString([(2, 1), (3.5, 0.5)]), + ], 
+} +nofilter["driehoek 1b"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(0.5, 0.5), (2.1, 1)]), + shapely.geometry.LineString([(0.5, 1.5), (2.1, 1)]), + shapely.geometry.LineString([(2.1, 1), (3.5, 0.5)]), + ], +} +nofilter["driehoek 1c"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(0.5, 0.5), (1.9, 1)]), + shapely.geometry.LineString([(0.5, 1.5), (1.9, 1)]), + shapely.geometry.LineString([(1.9, 1), (3.5, 0.5)]), + ], +} +nofilter["driehoek 2"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(3.5, 1.5), (2, 1)]), + shapely.geometry.LineString([(3.5, 0.5), (2, 1)]), + shapely.geometry.LineString([(2, 1), (0.5, 0.5)]), + ], +} +nofilter["driehoek 3"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (4, 0), (4, 2), (2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(3.5, 1.5), (2, 1)]), + shapely.geometry.LineString([(3.5, 0.5), (2, 1)]), + shapely.geometry.LineString([(0.5, 0.5), (2, 1)]), + ], +} +nofilter["volgorde groep 1"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]), + shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(0.5, 0.5), (2.1, 0.5)]), + shapely.geometry.LineString([(2.1, 0.5), (3.5, 0.5)]), + ], +} +nofilter["volgorde groep 2"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + 
shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]), + shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(2.1, 0.5), (0.5, 0.5)]), + shapely.geometry.LineString([(3.5, 0.5), (2.1, 0.5)]), + ], +} +nofilter["volgorde groep 3"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]), + shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(0.5, 0.5), (2.1, 0.5)]), + shapely.geometry.LineString([(3.5, 0.5), (2.1, 0.5)]), + ], +} +nofilter["volgorde groep 4"] = { + "peilgebieden": [ + shapely.geometry.Polygon([(0, 0), (2, 0), (2, 2), (0, 2)]), + shapely.geometry.Polygon([(2, 0), (2.2, 0), (2.2, 2), (2, 2)]), + shapely.geometry.Polygon([(2.2, 0), (4, 0), (4, 2), (2.2, 2)]), + ], + "hydroobjecten": [ + shapely.geometry.LineString([(2.1, 0.5), (0.5, 0.5)]), + shapely.geometry.LineString([(2.1, 0.5), (3.5, 0.5)]), + ], +} + + +testdir = pathlib.Path("tests/data") +if not testdir.exists(): + testdir.mkdir() + +for testlist in tqdm.tqdm([nofilter, withfilter]): + for test_name, options in testlist.items(): + polyid = [f"poly_{i+1}" for i in range(len(options["peilgebieden"]))] + polywl = [float(i + 1) for i in range(len(options["peilgebieden"]))] + # df_peil = gpd.GeoDataFrame(dict(globalid=polyid, geometry=options["peilgebieden"]), crs="epsg:28992") + # df_streef = gpd.GeoDataFrame(dict(globalid=polyid, waterhoogte=polywl, geometry=len(options["peilgebieden"]) * [None]), crs="epsg:28992") + # lineid = [f"line_{i+1}" for i in range(len(options["hydroobjecten"]))] + # df_hydro = gpd.GeoDataFrame(dict(globalid=lineid, geometry=options["hydroobjecten"]), crs="epsg:28992") + + df_peil = gpd.GeoDataFrame({"globalid": polyid, "geometry": options["peilgebieden"]}, crs="epsg:28992") + df_streef = gpd.GeoDataFrame( + {"globalid": polyid, 
"waterhoogte": polywl, "geometry": len(options["peilgebieden"]) * [None]}, + crs="epsg:28992", + ) + + lineid = [f"line_{i+1}" for i in range(len(options["hydroobjecten"]))] + df_hydro = gpd.GeoDataFrame({"globalid": lineid, "geometry": options["hydroobjecten"]}, crs="epsg:28992") + + if "duikersifonhevel" not in options: + # Empty filter + gpkg_path1 = testdir.joinpath(f"nofilter_{test_name}.gpkg") + df_hydro.to_file(gpkg_path1, layer="hydroobject") + df_peil.to_file(gpkg_path1, layer="peilgebied") + df_streef.to_file(gpkg_path1, layer="streefpeil") + with warnings.catch_warnings(): + warnings.simplefilter(action="ignore", category=UserWarning) + gpd.GeoDataFrame(columns=["globalid", "geometry"]).to_file(gpkg_path1, layer="stuw") + gpd.GeoDataFrame(columns=["globalid", "geometry"]).to_file(gpkg_path1, layer="gemaal") + gpd.GeoDataFrame(columns=["globalid", "geometry"]).to_file(gpkg_path1, layer="duikersifonhevel") + else: + # With filter + gpkg_path2 = testdir.joinpath(f"withfilter_{test_name}.gpkg") + df_hydro.to_file(gpkg_path2, layer="hydroobject") + df_peil.to_file(gpkg_path2, layer="peilgebied") + df_streef.to_file(gpkg_path2, layer="streefpeil") + polyfl = [f"dsh_{i+1}" for i in range(len(options["duikersifonhevel"]))] + # df_filter = gpd.GeoDataFrame(dict(globalid=polyfl, geometry=options["duikersifonhevel"]), crs="epsg:28992") + df_filter = gpd.GeoDataFrame( + {"globalid": polyfl, "geometry": options["duikersifonhevel"]}, crs="epsg:28992" + ) + + df_filter.to_file(gpkg_path2, layer="duikersifonhevel") + with warnings.catch_warnings(): + warnings.simplefilter(action="ignore", category=UserWarning) + gpd.GeoDataFrame(columns=["globalid", "geometry"]).to_file(gpkg_path2, layer="stuw") + gpd.GeoDataFrame(columns=["globalid", "geometry"]).to_file(gpkg_path2, layer="gemaal") + + +def make_plot(df_peil, df_hydro, df_streef, df_filter, df_crossings): + plt.close("all") + fig, ax = plt.subplots(figsize=(8, 5), dpi=100) + + # old_len = len(df_peil) + dfp = 
df_peil.copy().explode(index_parts=True) + dfs = df_streef.set_index("globalid", inplace=False) + for i, row in enumerate(dfp.itertuples()): + coords = row.geometry.exterior.coords + # if old_len == len(dfp): + xtext = row.geometry.centroid.x - 0.5 + ytext = row.geometry.centroid.y + # else: + # xtext = coords[0][0] + 0.05 + # ytext = coords[0][1] + if (i % 2) == 0: + ytext += 0.05 + else: + ytext -= 0.05 + + ax.text(xtext, ytext, f"{row.globalid}, wl={dfs.waterhoogte.at[row.globalid]}m", alpha=0.5) + ax.add_patch(Polygon(coords, alpha=0.5, lw=1, facecolor="powderblue", edgecolor="skyblue")) + + dfh = df_hydro.explode(index_parts=True) + for row in dfh.itertuples(): + coords = np.array(row.geometry.coords) + x, y = coords[:, 0], coords[:, 1] + offset = row.geometry.interpolate(0.1, normalized=True) + ax.arrow( + x[0], + y[0], + offset.x - x[0], + offset.y - y[0], + shape="full", + lw=0, + length_includes_head=True, + head_width=0.05, + color="steelblue", + ) + ax.text(offset.x, offset.y + 0.05, row.globalid) + ax.plot(x, y, marker=".", markersize=5, lw=1, color="steelblue") + + for row in df_crossings.itertuples(): + if row.crossing_type == "-10": + ax.plot(row.geometry.x, row.geometry.y, marker="s", markersize=5, color="olivedrab") + elif row.crossing_type == "00": + ax.plot(row.geometry.x, row.geometry.y, marker="s", markersize=5, color="indianred") + else: + print(f"{row.crossing_type=} not implemented") + ax.plot(row.geometry.x, row.geometry.y, marker="s", markersize=5, color="yellow") + + dff = df_filter.explode(index_parts=True) + for row in dff.itertuples(): + coords = np.array(row.geometry.coords) + x, y = coords[:, 0], coords[:, 1] + ax.plot(x, y, marker=".", markersize=5, lw=2, alpha=0.3, color="purple") + + ax.set_xlim([-0.1, 4.1]) + ax.set_ylim([-0.1, 2.1]) + ax.set_aspect("equal") + fig.tight_layout() + plt.show() + + +for i, gpkg_path in enumerate(sorted(testdir.glob("nofilter_*.gpkg"))): + if gpkg_path.is_file() and gpkg_path.suffix == ".gpkg": + # 
if "butterfly 3b" not in gpkg_path.stem and "polygon within polygon 1" not in gpkg_path.stem: + # continue + # if "perfect fit on edge" not in gpkg_path.stem: + # continue + # if "polygon within polygon 1" not in gpkg_path.stem: + # continue + # if "narrow gap" not in gpkg_path.stem: + # continue + # if "driehoek" not in gpkg_path.stem: + # continue + # if "nofilter_narrow gap" not in gpkg_path.stem: + # continue + # if "volgorde groep" not in gpkg_path.stem and "nofilter_polygon butterfly 2e" not in gpkg_path.stem and "nofilter_perfect fit star 2" not in gpkg_path.stem: + # continue + # if "nofilter_polygon butterfly 2e" not in gpkg_path.stem and "nofilter_perfect fit star 2" not in gpkg_path.stem: + # continue + + print(f"Test {i+1:02d}: {gpkg_path.stem}") + cross = ParseCrossings(gpkg_path, disable_progress=True, show_log=True) + df_crossings = cross.find_crossings_with_peilgebieden( + "hydroobject", group_stacked=True, filterlayer=None, agg_links=False + ) + df_crossings_valid = df_crossings[df_crossings.in_use].copy() + + test_output = df_crossings_valid.copy() + test_output["geom_x"] = np.round(test_output.geometry.x, 8) + test_output["geom_y"] = np.round(test_output.geometry.y, 8) + test_output = test_output.drop(columns="geometry", inplace=False) + + # Make static test output + test_output.to_csv(testdir.joinpath(f"output_{gpkg_path.stem}.csv"), index=False) + + print(df_crossings) + print(df_crossings_valid) + make_plot( + cross.df_gpkg["peilgebied"], + cross.df_gpkg["hydroobject"], + cross.df_gpkg["streefpeil"], + cross.df_gpkg["duikersifonhevel"], + df_crossings_valid, + ) + + +for i, gpkg_path in enumerate(sorted(testdir.glob("withfilter_*.gpkg"))): + if gpkg_path.is_file() and gpkg_path.suffix == ".gpkg": + # if "withfilter_polygon butterfly 1a" not in gpkg_path.stem: + # continue + # if "withfilter_narrow gap" not in gpkg_path.stem: + # continue + # if "scheldestromen" not in gpkg_path.stem: + # continue + + print(f"Test {i+1:02d}: {gpkg_path.stem}") 
+ + cross = ParseCrossings(gpkg_path, disable_progress=True, show_log=True) + _, df_filter, df_crossings = cross.find_crossings_with_peilgebieden( + "hydroobject", group_stacked=True, filterlayer="duikersifonhevel", agg_links=False + ) + df_crossings_valid = df_crossings[df_crossings.in_use].copy() + + test_output = df_crossings_valid.copy() + test_output["geom_x"] = np.round(test_output.geometry.x, 8) + test_output["geom_y"] = np.round(test_output.geometry.y, 8) + test_output = test_output.drop(columns="geometry", inplace=False) + + # Make static test output + test_output.to_csv(testdir.joinpath(f"output_{gpkg_path.stem}.csv"), index=False) + + print(df_crossings) + print(df_filter) + print(df_crossings_valid) + if "scheldestromen" not in gpkg_path.stem: + make_plot( + cross.df_gpkg["peilgebied"], + cross.df_gpkg["hydroobject"], + cross.df_gpkg["streefpeil"], + cross.df_gpkg["duikersifonhevel"], + df_crossings_valid, + ) diff --git a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb deleted file mode 100644 index 574aa95..0000000 --- a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.ipynb +++ /dev/null @@ -1,101 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "import pathlib\n", - "\n", - "import geopandas as gpd\n", - "import pandas as pd\n", - "from IPython.core.display import HTML\n", - "\n", - "from peilbeheerst_model import ParseCrossings, waterschap_data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "print_df = {}\n", - "for waterschap, waterschap_struct in waterschap_data.items():\n", - " for funcname, func_args in waterschap_struct.items():\n", - " if funcname not in print_df:\n", - " print_df[funcname] = []\n", - " print_df[funcname].append(pd.Series(func_args, name=waterschap))\n", - "\n", - "for funcname, df in 
print_df.items():\n", - " print(HTML(f\"

Function {funcname}:

\"))\n", - " print(pd.DataFrame(df))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "for waterschap, waterschap_struct in waterschap_data.items():\n", - " print(f\"\\n{waterschap}...\")\n", - "\n", - " init_settings, crossing_settings = waterschap_struct.values()\n", - " gpkg = pathlib.Path(init_settings[\"output_path\"])\n", - " if not gpkg.exists():\n", - " raise ValueError(gpkg)\n", - "\n", - " df_peilgebieden = gpd.read_file(gpkg, layer=\"peilgebied\")\n", - " org_shape = df_peilgebieden.shape\n", - " df_peilgebieden = ParseCrossings._make_valid_2dgeom(df_peilgebieden)\n", - "\n", - " df_peilgebieden = ParseCrossings.add_krw_to_peilgebieden(\n", - " df_peilgebieden,\n", - " init_settings[\"krw_path\"],\n", - " init_settings[\"krw_column_id\"],\n", - " init_settings[\"krw_column_name\"],\n", - " init_settings[\"krw_min_overlap\"],\n", - " \",\",\n", - " )\n", - "\n", - " assert df_peilgebieden.shape[0] == org_shape[0]\n", - " df_peilgebieden.to_file(gpkg, layer=\"peilgebied\")\n", - " print(gpkg)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py new file mode 100644 index 0000000..dbe31c1 --- /dev/null +++ b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py @@ -0,0 +1,44 @@ +import pathlib + +import geopandas as gpd +import pandas as pd 
+from IPython.core.display import HTML + +from peilbeheerst_model import ParseCrossings, waterschap_data + +print_df = {} +for waterschap, waterschap_struct in waterschap_data.items(): + for funcname, func_args in waterschap_struct.items(): + if funcname not in print_df: + print_df[funcname] = [] + print_df[funcname].append(pd.Series(func_args, name=waterschap)) + +for funcname, df in print_df.items(): + print(HTML(f"

Function {funcname}:

")) + print(pd.DataFrame(df)) + + +for waterschap, waterschap_struct in waterschap_data.items(): + print(f"\n{waterschap}...") + + init_settings, crossing_settings = waterschap_struct.values() + gpkg = pathlib.Path(init_settings["output_path"]) + if not gpkg.exists(): + raise ValueError(gpkg) + + df_peilgebieden = gpd.read_file(gpkg, layer="peilgebied") + org_shape = df_peilgebieden.shape + df_peilgebieden = ParseCrossings._make_valid_2dgeom(df_peilgebieden) + + df_peilgebieden = ParseCrossings.add_krw_to_peilgebieden( + df_peilgebieden, + init_settings["krw_path"], + init_settings["krw_column_id"], + init_settings["krw_column_name"], + init_settings["krw_min_overlap"], + ",", + ) + + assert df_peilgebieden.shape[0] == org_shape[0] + df_peilgebieden.to_file(gpkg, layer="peilgebied") + print(gpkg) diff --git a/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb b/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb deleted file mode 100644 index f1d6006..0000000 --- a/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.ipynb +++ /dev/null @@ -1,1194 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "import warnings\n", - "from pathlib import Path\n", - "\n", - "import pandas as pd\n", - "import ribasim\n", - "from crossings_to_ribasim import *\n", - "from ribasim import Model\n", - "\n", - "pd.set_option(\"display.max_columns\", None)\n", - "warnings.filterwarnings(\"ignore\") # Let op!!!!!!!!!!!!!! 
Waarschuwingen worden genegeerd\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "ribasim.__version__\n", - "\n", - "\n", - "# load ribasim_nl\n", - "module_path = Path.cwd() / \"../ribasim_nl/\"\n", - "sys.path.append(str(module_path))\n", - "\n", - "# load crossings_to_ribasim.py\n", - "sys.path.append(\"peilbeheerst_model\")" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "# Amstel, Gooi en Vecht" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"AmstelGooienVecht\",\n", - " \"modelname\": \"20240417_samenwerkdag\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/AmstelGooienVecht/AGV.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/AmstelGooienVecht/agv_crossings_v05.gpkg\",\n", - " \"path_boezem\": \"../../../../Data_shortest_path/AGV/AGV_shortest_path.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = 
waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge.df = edge\n", - "\n", - "model.basin.node.df = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node.df = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node.df = tabulated_rating_curve_node\n", - 
"model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.manning_resistance.node.df = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.level_boundary.node.df = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node.df = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.terminal.node.df = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(model, post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "# Delfland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"Delfland\",\n", - " \"modelname\": \"20240423_omgedraaid\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/Delfland/Delfland.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/Delfland/delfland_crossings_v08.gpkg\",\n", - " \"path_boezem\": \"../../../../Data_shortest_path/Delfland/Delfland_shortest_path.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": 
True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", 
- "model.edge = edge\n", - "\n", - "model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "# Hollandse Delta" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"HollandseDelta\",\n", - " \"modelname\": \"20240417_samenwerkdag\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/Hollandse_Delta/HD.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/Hollandse_Delta/hd_crossings_v06.gpkg\",\n", - " 
\"path_Pdrive\": None,\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " \"path_boezem\": \"../../../../Data_shortest_path/Hollandse_Delta/HD_shortest_path.gpkg\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = 
network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge = edge\n", - "\n", - "model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Hollands Noorderkwartier" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - 
"model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"HollandsNoorderkwartier\",\n", - " \"modelname\": \"20240502\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/HHNK/Noorderkwartier.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/HHNK/hhnk_crossings_v26.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " \"path_boezem\": \"../../../../Data_shortest_path/HHNK/HHNK_shortest_path.gpkg\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "edges = waterboard.change_boezems_manually(edges)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = 
network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge = edge\n", - "\n", - "model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", 
- "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "# Rijnland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"Rijnland\",\n", - " \"modelname\": \"20240414_aggregated\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/Rijnland/Rijnland.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/Rijnland/rijnland_crossings_v04.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_boezem\": \"../../../../Data_shortest_path/Rijnland/Rijnland_shortest_path.gpkg\",\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = 
waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge = edge\n", - "\n", - "model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - 
"model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(post_processed_data=post_processed_data, crossings=crossings, model=model)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "# Rivierenland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"Rivierenland\",\n", - " \"modelname\": \"20240402_bug_fix\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/WSRL/WSRL.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/WSRL/wsrl_crossings_v06.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " \"path_boezem\": \"../../../../Data_shortest_path/WSRL/WSRL_shortest_path.gpkg\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "waterboard = 
CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge = edge\n", - "\n", - "model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", 
- "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "# Scheldestromen" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"Scheldestromen\",\n", - " \"modelname\": 
\"20240417_samenwerkdag\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/Scheldestromen/Scheldestromen.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/Scheldestromen/scheldestromen_crossings_v02.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_boezem\": \"../../../../Data_shortest_path/Scheldestromen/Scheldestromen_shortest_path.gpkg\",\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, 
pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge.df = edge\n", - "\n", - "model.basin.node.df = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node.df = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node.df = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.manning_resistance.node.df = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.level_boundary.node.df = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node.df = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.terminal.node.df = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(model, post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, 
post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "# Schieland en de Krimpenerwaard" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"SchielandendeKrimpenerwaard\",\n", - " \"modelname\": \"20240429_check\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/HHSK/HHSK.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/HHSK/hhsk_crossings_v04.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_boezem\": \"../../../../Data_shortest_path/HHSK/HHSK_shortest_path.gpkg\",\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = 
waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge = edge\n", - "\n", - "model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static 
= flow_boundary_static\n", - "\n", - "model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "model.tabulated_rating_curve.static" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "# Wetterskip" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"WetterskipFryslan\",\n", - " \"modelname\": \"20240417_samenwerkdag\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/Wetterskip/wetterskip_crossings_v06.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_boezem\": \"../../../../Data_shortest_path/Wetterskip/Wetterskip_shortest_path.gpkg\",\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True, ############################ LET OP\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": 
True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", - "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge = edge\n", - "\n", - 
"model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "# Zuiderzeeland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "model_characteristics = {\n", - " # model description\n", - " \"waterschap\": \"Zuiderzeeland\",\n", - " \"modelname\": \"20240417_samenwerkdag\",\n", - " \"modeltype\": \"boezemmodel\",\n", - " # define paths\n", - " \"path_postprocessed_data\": r\"../../../../Data_postprocessed/Waterschappen/Zuiderzeeland/Zuiderzeeland.gpkg\",\n", - " \"path_crossings\": \"../../../../Data_crossings/Zuiderzeeland/zzl_crossings_v05.gpkg\",\n", - " \"path_Pdrive\": None,\n", - " \"path_goodcloud_password\": \"../../../../Data_overig/password_goodcloud.txt\",\n", - " \"path_boezem\": 
\"../../../../Data_shortest_path/Zuiderzeeland/Zuiderzeeland_shortest_path.gpkg\",\n", - " # apply filters\n", - " \"crossings_layer\": \"crossings_hydroobject_filtered\",\n", - " \"in_use\": True,\n", - " \"agg_links_in_use\": True,\n", - " \"agg_areas_in_use\": True,\n", - " \"aggregation\": True,\n", - " # data storage settings\n", - " \"write_Pdrive\": False,\n", - " \"write_Zdrive\": True,\n", - " \"write_goodcloud\": True,\n", - " \"write_checks\": True,\n", - " \"write_symbology\": True,\n", - " # numerical settings\n", - " \"solver\": None,\n", - " \"logging\": None,\n", - " \"starttime\": \"2024-01-01 00:00:00\",\n", - " \"endtime\": \"2024-01-02 00:00:00\",\n", - "}\n", - "\n", - "waterboard = CrossingsToRibasim(model_characteristics=model_characteristics)\n", - "\n", - "post_processed_data, crossings = waterboard.read_files()\n", - "post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings)\n", - "crossings = waterboard.assign_node_ids(crossings)\n", - "edges = waterboard.create_edges(crossings)\n", - "nodes, edges = waterboard.create_nodes(crossings, edges)\n", - "edges = waterboard.embed_boezems(edges, post_processed_data, crossings)\n", - "\n", - "\n", - "# create individual model parts of the network\n", - "network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics)\n", - "\n", - "edge = network.edge()\n", - "basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin()\n", - "pump_node, pump_static = network.pump()\n", - "tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve()\n", - "level_boundary_node, level_boundary_static = network.level_boundary()\n", - "flow_boundary_node, flow_boundary_static = network.flow_boundary()\n", - "manning_resistance_node, manning_resistance_static = network.manning_resistance()\n", - "terminal_node = network.terminal()\n", - "\n", - "# linear_resistance = network.linear_resistance()\n", 
- "# fractional_flow = network.fractional_flow()\n", - "# outlet = network.outlet()\n", - "# discrete_control = network.discrete_control()\n", - "# pid_control = network.pid_control()\n", - "\n", - "# insert the individual model modules in an actual model\n", - "model = Model(starttime=model_characteristics[\"starttime\"], endtime=model_characteristics[\"endtime\"], crs=\"EPSG:28992\")\n", - "\n", - "model.edge = edge\n", - "\n", - "model.basin.node = basin_node\n", - "model.basin.profile = basin_profile\n", - "model.basin.static = basin_static\n", - "model.basin.state = basin_state\n", - "model.basin.area = basin_area\n", - "\n", - "model.pump.node = pump_node\n", - "model.pump.static = pump_static\n", - "\n", - "model.tabulated_rating_curve.node = tabulated_rating_curve_node\n", - "model.tabulated_rating_curve.static = tabulated_rating_curve_static\n", - "\n", - "model.level_boundary.node = level_boundary_node\n", - "model.level_boundary.static = level_boundary_static\n", - "\n", - "model.flow_boundary.node = flow_boundary_node\n", - "model.flow_boundary.static = flow_boundary_static\n", - "\n", - "model.manning_resistance.node = manning_resistance_node\n", - "model.manning_resistance.static = manning_resistance_static\n", - "\n", - "model.terminal.node = terminal_node\n", - "\n", - "# add checks and metadata\n", - "checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings)\n", - "model = network.add_meta_data(model, checks, post_processed_data, crossings)\n", - "\n", - "# write the result\n", - "network.WriteResults(model=model, checks=checks)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": 
".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.7" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.py b/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.py new file mode 100644 index 0000000..2bb085b --- /dev/null +++ b/src/peilbeheerst_model/02_crossings_to_ribasim_notebook.py @@ -0,0 +1,1015 @@ +import sys +import warnings +from pathlib import Path + +import pandas as pd +import ribasim +from crossings_to_ribasim import * +from ribasim import Model + +pd.set_option("display.max_columns", None) +warnings.filterwarnings("ignore") # Let op!!!!!!!!!!!!!! Waarschuwingen worden genegeerd + + +ribasim.__version__ + + +# load ribasim_nl +module_path = Path.cwd() / "../ribasim_nl/" +sys.path.append(str(module_path)) + +# load crossings_to_ribasim.py +sys.path.append("peilbeheerst_model") + + +# # Amstel, Gooi en Vecht + + +model_characteristics = { + # model description + "waterschap": "AmstelGooienVecht", + "modelname": "20240417_samenwerkdag", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/AmstelGooienVecht/AGV.gpkg", + "path_crossings": "../../../../Data_crossings/AmstelGooienVecht/agv_crossings_v05.gpkg", + "path_boezem": "../../../../Data_shortest_path/AGV/AGV_shortest_path.gpkg", + "path_Pdrive": None, + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 
00:00:00", +} + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge.df = edge + +model.basin.node.df = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node.df = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node.df = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.manning_resistance.node.df = manning_resistance_node 
+model.manning_resistance.static = manning_resistance_static + +model.level_boundary.node.df = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node.df = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.terminal.node.df = terminal_node + +# add checks and metadata +checks = network.check(model, post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Delfland + + +model_characteristics = { + # model description + "waterschap": "Delfland", + "modelname": "20240423_omgedraaid", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/Delfland/Delfland.gpkg", + "path_crossings": "../../../../Data_crossings/Delfland/delfland_crossings_v08.gpkg", + "path_boezem": "../../../../Data_shortest_path/Delfland/Delfland_shortest_path.gpkg", + "path_Pdrive": None, + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) 
+edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.manning_resistance.node = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.terminal.node = terminal_node + + +# add checks and metadata +checks = network.check(post_processed_data=post_processed_data, crossings=crossings) +model = 
network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Hollandse Delta + + +model_characteristics = { + # model description + "waterschap": "HollandseDelta", + "modelname": "20240417_samenwerkdag", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/Hollandse_Delta/HD.gpkg", + "path_crossings": "../../../../Data_crossings/Hollandse_Delta/hd_crossings_v06.gpkg", + "path_Pdrive": None, + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + "path_boezem": "../../../../Data_shortest_path/Hollandse_Delta/HD_shortest_path.gpkg", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, 
tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.manning_resistance.node = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.terminal.node = terminal_node + +# add checks and metadata +checks = network.check(post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Hollands Noorderkwartier + + +model_characteristics = { + # model description + "waterschap": "HollandsNoorderkwartier", + "modelname": "20240502", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": 
r"../../../../Data_postprocessed/Waterschappen/HHNK/Noorderkwartier.gpkg", + "path_crossings": "../../../../Data_crossings/HHNK/hhnk_crossings_v26.gpkg", + "path_Pdrive": None, + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + "path_boezem": "../../../../Data_shortest_path/HHNK/HHNK_shortest_path.gpkg", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) +edges = waterboard.change_boezems_manually(edges) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = 
network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.terminal.node = terminal_node + +# add checks and metadata +checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Rijnland + + +model_characteristics = { + # model description + "waterschap": "Rijnland", + "modelname": "20240414_aggregated", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/Rijnland/Rijnland.gpkg", + "path_crossings": "../../../../Data_crossings/Rijnland/rijnland_crossings_v04.gpkg", + "path_Pdrive": None, + "path_boezem": "../../../../Data_shortest_path/Rijnland/Rijnland_shortest_path.gpkg", + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + 
"aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state 
+model.basin.area = basin_area + + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.manning_resistance.node = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.terminal.node = terminal_node + +# add checks and metadata +checks = network.check(post_processed_data=post_processed_data, crossings=crossings, model=model) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Rivierenland + + +model_characteristics = { + # model description + "waterschap": "Rivierenland", + "modelname": "20240402_bug_fix", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/WSRL/WSRL.gpkg", + "path_crossings": "../../../../Data_crossings/WSRL/wsrl_crossings_v06.gpkg", + "path_Pdrive": None, + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + "path_boezem": "../../../../Data_shortest_path/WSRL/WSRL_shortest_path.gpkg", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = 
waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + 
+model.terminal.node = terminal_node + +# add checks and metadata +checks = network.check(post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.manning_resistance.node = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.terminal.node = terminal_node + +# add checks and metadata +checks = network.check(post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Scheldestromen + + +model_characteristics = { + # model description + "waterschap": "Scheldestromen", + "modelname": "20240417_samenwerkdag", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/Scheldestromen/Scheldestromen.gpkg", + "path_crossings": "../../../../Data_crossings/Scheldestromen/scheldestromen_crossings_v02.gpkg", + "path_Pdrive": None, + "path_boezem": "../../../../Data_shortest_path/Scheldestromen/Scheldestromen_shortest_path.gpkg", + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": 
True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge.df = edge + +model.basin.node.df = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node.df = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node.df = tabulated_rating_curve_node 
+model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.manning_resistance.node.df = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.level_boundary.node.df = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node.df = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.terminal.node.df = terminal_node + +# add checks and metadata +checks = network.check(model, post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Schieland en de Krimpenerwaard + + +model_characteristics = { + # model description + "waterschap": "SchielandendeKrimpenerwaard", + "modelname": "20240429_check", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/HHSK/HHSK.gpkg", + "path_crossings": "../../../../Data_crossings/HHSK/hhsk_crossings_v04.gpkg", + "path_Pdrive": None, + "path_boezem": "../../../../Data_shortest_path/HHSK/HHSK_shortest_path.gpkg", + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = 
waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() + +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.manning_resistance.node = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.terminal.node 
= terminal_node + +# add checks and metadata +checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +model.tabulated_rating_curve.static + + +# # Wetterskip + + +model_characteristics = { + # model description + "waterschap": "WetterskipFryslan", + "modelname": "20240417_samenwerkdag", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip.gpkg", + "path_crossings": "../../../../Data_crossings/Wetterskip/wetterskip_crossings_v06.gpkg", + "path_Pdrive": None, + "path_boezem": "../../../../Data_shortest_path/Wetterskip/Wetterskip_shortest_path.gpkg", + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, ############################ LET OP + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, 
model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static = network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.manning_resistance.node = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.terminal.node = terminal_node + +# add checks and metadata +checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) + + +# # Zuiderzeeland 
+ + +model_characteristics = { + # model description + "waterschap": "Zuiderzeeland", + "modelname": "20240417_samenwerkdag", + "modeltype": "boezemmodel", + # define paths + "path_postprocessed_data": r"../../../../Data_postprocessed/Waterschappen/Zuiderzeeland/Zuiderzeeland.gpkg", + "path_crossings": "../../../../Data_crossings/Zuiderzeeland/zzl_crossings_v05.gpkg", + "path_Pdrive": None, + "path_goodcloud_password": "../../../../Data_overig/password_goodcloud.txt", + "path_boezem": "../../../../Data_shortest_path/Zuiderzeeland/Zuiderzeeland_shortest_path.gpkg", + # apply filters + "crossings_layer": "crossings_hydroobject_filtered", + "in_use": True, + "agg_links_in_use": True, + "agg_areas_in_use": True, + "aggregation": True, + # data storage settings + "write_Pdrive": False, + "write_Zdrive": True, + "write_goodcloud": True, + "write_checks": True, + "write_symbology": True, + # numerical settings + "solver": None, + "logging": None, + "starttime": "2024-01-01 00:00:00", + "endtime": "2024-01-02 00:00:00", +} + +waterboard = CrossingsToRibasim(model_characteristics=model_characteristics) + +post_processed_data, crossings = waterboard.read_files() +post_processed_data, crossings = waterboard.routing_processor(post_processed_data, crossings) +crossings = waterboard.assign_node_ids(crossings) +edges = waterboard.create_edges(crossings) +nodes, edges = waterboard.create_nodes(crossings, edges) +edges = waterboard.embed_boezems(edges, post_processed_data, crossings) + + +# create individual model parts of the network +network = RibasimNetwork(nodes=nodes, edges=edges, model_characteristics=model_characteristics) + +edge = network.edge() +basin_node, basin_profile, basin_static, basin_state, basin_area = network.basin() +pump_node, pump_static = network.pump() +tabulated_rating_curve_node, tabulated_rating_curve_static = network.tabulated_rating_curve() +level_boundary_node, level_boundary_static = network.level_boundary() +flow_boundary_node, flow_boundary_static 
= network.flow_boundary() +manning_resistance_node, manning_resistance_static = network.manning_resistance() +terminal_node = network.terminal() + +# linear_resistance = network.linear_resistance() +# fractional_flow = network.fractional_flow() +# outlet = network.outlet() +# discrete_control = network.discrete_control() +# pid_control = network.pid_control() + +# insert the individual model modules in an actual model +model = Model(starttime=model_characteristics["starttime"], endtime=model_characteristics["endtime"], crs="EPSG:28992") + +model.edge = edge + +model.basin.node = basin_node +model.basin.profile = basin_profile +model.basin.static = basin_static +model.basin.state = basin_state +model.basin.area = basin_area + +model.pump.node = pump_node +model.pump.static = pump_static + +model.tabulated_rating_curve.node = tabulated_rating_curve_node +model.tabulated_rating_curve.static = tabulated_rating_curve_static + +model.level_boundary.node = level_boundary_node +model.level_boundary.static = level_boundary_static + +model.flow_boundary.node = flow_boundary_node +model.flow_boundary.static = flow_boundary_static + +model.manning_resistance.node = manning_resistance_node +model.manning_resistance.static = manning_resistance_static + +model.terminal.node = terminal_node + +# add checks and metadata +checks = network.check(model=model, post_processed_data=post_processed_data, crossings=crossings) +model = network.add_meta_data(model, checks, post_processed_data, crossings) + +# write the result +network.WriteResults(model=model, checks=checks) diff --git a/src/peilbeheerst_model/03_test_outlets.ipynb b/src/peilbeheerst_model/03_test_outlets.ipynb deleted file mode 100644 index d05157b..0000000 --- a/src/peilbeheerst_model/03_test_outlets.ipynb +++ /dev/null @@ -1,365 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "\n", - "from outlets import case1, 
case2\n", - "\n", - "sys.path.append(\"peilbeheerst_model\")\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "# Case 1" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Example 1: boundary and basin levels on target " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "case1_example1 = case1(\"case1_example1\")\n", - "case1_example1.create_model()" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Example 2: boundary levels below target" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "case1_example2 = case1(\"case1_example2\")\n", - "case1_example2.create_model()" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Example 3: boundary levels on target, initial state below target" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "case1_example3 = case1(\"case1_example3\")\n", - "case1_example3.create_model()" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "### Example 4: boundary levels on target, initial state above target" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "case1_example1 = case1(\"case1_example4\")\n", - "case1_example1.create_model()" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "# Case 2" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "### Example 1: boundary and basin levels on target " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - 
"source": [ - "# first, load in the simple model of case 1. Copy it.\n", - "case2_example1 = case1(\"case2_example1\")\n", - "case2_base_model = case2_example1.create_model(copy=True)\n", - "\n", - "# then, change it to a case 2 category\n", - "case2_example1 = case2(\"case2_example1\", model=case2_base_model)\n", - "case2_example1.create_model()" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "### Example 2: boundary and basins below target, third basin above\n", - "The third basins should fill up the others, or else be pumped to target level by the second pump." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# first, load in the simple model of case 1. Copy it.\n", - "case2_example1 = case1(\"case2_example2\")\n", - "case2_base_model = case2_example1.create_model(copy=True)\n", - "\n", - "case2_example2 = case2(\"case2_example2\", model=case2_base_model)\n", - "case2_example2.create_model()" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "### Example 3: boundary and basins below target, third basin above, pump rate of third peilgebied set to 0. \n", - "Quite similair to case 2 example 2, exvept that the pump rate of first pump set to a low rate, so a rising water level is expected in the other basins. The third basins should fill up the others." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "# first, load in the simple model of case 1. 
Copy it.\n", - "case2_example1 = case1(\"case2_example3\")\n", - "case2_base_model = case2_example1.create_model(copy=True)\n", - "\n", - "# implement the second model\n", - "case2_example3 = case2(\"case2_example3\", model=case2_base_model)\n", - "case2_example3.create_model()" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "### Example 4: low target level in third basin, results in incorrect flow direction\n", - "\n", - "The water level in each basin, including the boundaries, are too low. No water should be flowing. However, only the initial level is higher than the target level of the third basin. Water should only flow from the third basin to the last level boundary." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# first, load in the simple model of case 1. Copy it.\n", - "case2_example4 = case1(\"case2_example4\")\n", - "case2_base_model = case2_example4.create_model(copy=True)\n", - "\n", - "# implement the second model\n", - "case2_example4 = case2(\"case2_example4\", model=case2_base_model)\n", - "case2_example4.create_model()" - ] - }, - { - "cell_type": "raw", - "id": "19", - "metadata": {}, - "source": [ - "Volgende case zou kunnen zijn dat er tussen twee peilgebieden (met wel of niet andere target levels) er outlets zijn, die van de een naar de ander gaan, en andersom. Maar in principe verwacht ik daar geen gekke situaties: de outlet laat alleen water stromen als dit onder vrij verval kan, en pompt geen water omhoog. Het enige wat wel gek zou kunnen worden, is als beide peilen rond hetzelfde niveau komen. Dan zou het water de ene tijdstap van links naar rechts kunnen stromen, en de andere momenten de andere kant op. Ik kan me voorstellen dat dit tot instabiliteiten leidt. Weet alleen niet zeker of dit gaat optreden bij simpele voorbeelden als hier." 
- ] - }, - { - "cell_type": "raw", - "id": "20", - "metadata": {}, - "source": [ - "Wat wel interessant zou zijn is het toch wel toevoegen van ContinuousControls. Eerst leek dit niet een logische stap, omdat ik wilde dat de outlet zou luisteren naar boven- en benedenstroomse peil. Maar nu doet dat het eigenlijk alleen naar bovenstrooms. \n", - "\n", - "Punt van reflectie: is dat uberhaupt wel de goede aanpak? Hoe weet de basin dan dat het water moet doorlaten? Je kan dit doen door de crest level een stukje lager te zetten dan streefpeil, maar dat houdt dan wel in dat er ALTIJD water stroomt. Qua doorspoeling is dat opzich nog zo gek niet, maar het is niet de meest chique manier. Ook heb je hier dan weinig controle op, wat je misschien wel wil hebben.\n" - ] - }, - { - "cell_type": "raw", - "id": "21", - "metadata": {}, - "source": [ - "Conclusie(?): de discrete controls moeten OOK gaan luisteren naar benedenstroomse peil. Dit toch wel doen aan de hand van de vier verschillende opties, afhankelijk wat de streefpeil van peilgebied 1 en peilgebied 2 is. \n", - "\n", - "Om instabiliteiten tegen te gaan is het wellicht goed om de grenzen niet precies op streefpeil te zetten, maar juist met een bepaalde afwijking. De afwijking om water door te mogen voeren van peilgebied 1 naar peilgebied2 moet kunnen zolang het waterniveau van peilgebied 1 niet 5 cm onder streefpeil zakt. De inlaat vanaf de boezem naar peilgebied 1 moet in principe sturen op exact streefpeil. " - ] - }, - { - "cell_type": "raw", - "id": "22", - "metadata": {}, - "source": [ - "1) Hoe verhoudt dit zich tot de min_crest_level en een enkele listen_to_node?\n", - "\n", - "De min_crest_level voert met 1 listen_node_id altijd water door. In het geval van hierboven zou water altijd van peilgebied 1 naar peilgebied 2 gaan, terwijl peilgebied 2 wellicht helemaal geen water nodig heeft terwijl de inlaat van de boezem naar peilgebied 1 wel water aan het inlaten is voor peilgebied 2. 
\n", - "\n", - "\n", - "\n", - "2) Hoe verhoudt de min_crest_level zich met dubbele listen_to_nodes?\n", - "\n", - "Min_Crest_level lijkt me bijna overbodig worden. \n", - "\n", - "\n", - "3) Stel er komt alleen een min_crest_level op de genoemde 5 cm onder streefpeil van peilgebied 1, dat wordt als het ware een schaduw listen node. Dan hoeft de outlet alleen nog maar te luisteren naar de basin die benedenstrooms ligt?\n", - "\n", - "Dat is niet waar, want stel er is een minimum crest level gedefinieerd die 5 cm onder streefpeil ligt van peilgebied 1. Wat als de water stand 2 cm onder streefpeil ligt (dus wel nog boven crest level), en er is geen water nodig in peilgebied 2? --> dan blijft het stromen, terwijl dat niet moet." - ] - }, - { - "cell_type": "raw", - "id": "23", - "metadata": {}, - "source": [ - "Conclusie: vorige conclusie is correct. Luisteren naar zowel boven- als benedestrooms." - ] - }, - { - "cell_type": "raw", - "id": "24", - "metadata": {}, - "source": [ - "Stappenplan voor AGV:\n", - "\n", - "Loopen per DiscreteControl lijkt mij geen goed idee. 
\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "# Thrashbin" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "example1_characteristics = {}\n", - "\n", - "\n", - "# solver\n", - "example1_characteristics[\"starttime\"] = \"2024-01-01 00:00:00\"\n", - "example1_characteristics[\"endtime\"] = \"2024-01-03 00:00:00\"\n", - "example1_characteristics[\"saveat\"] = 60\n", - "\n", - "# boezem settings (LevelBoundary)\n", - "example1_characteristics[\"boezem1_level\"] = 3\n", - "example1_characteristics[\"boezem2_level\"] = 3\n", - "\n", - "# peilgebied settings (Basins)\n", - "example1_characteristics[\"basin1_profile_area\"] = [0.01, 10000.0]\n", - "example1_characteristics[\"basin1_profile_level\"] = [1.0, 5.0]\n", - "example1_characteristics[\"basin1_initial_level\"] = [3]\n", - "example1_characteristics[\"basin1_target_level\"] = [2]\n", - "\n", - "\n", - "example1_characteristics[\"basin2_profile_area\"] = [0.01, 10000.0]\n", - "example1_characteristics[\"basin2_profile_level\"] = [0.0, 5.0]\n", - "example1_characteristics[\"basin2_initial_level\"] = [2]\n", - "example1_characteristics[\"basin2_target_level\"] = [1]\n", - "\n", - "example1_characteristics[\"evaporation\"] = 5 # mm/day, will be converted later to m/s\n", - "example1_characteristics[\"precipitation\"] = 5 # mm/day, will be converted later to m/s\n", - "\n", - "# connection node settings (Outlets, Pumpts)\n", - "example1_characteristics[\"outlet1_flow_rate\"] = 0.010\n", - "example1_characteristics[\"outlet1_min_crest_level\"] = 2.90\n", - "\n", - "example1_characteristics[\"outlet2_flow_rate\"] = 0.010\n", - "example1_characteristics[\"outlet2_min_crest_level\"] = 1.90\n", - "\n", - "example1_characteristics[\"pump_flow_rate\"] = 10 / 60 # [x] m3 / minute\n", - "\n", - "# general settings\n", - "example1_characteristics[\"plot\"] = True\n", - 
"example1_characteristics[\"crs\"] = \"EPSG:4326\"\n", - "example1_characteristics[\"case\"] = \"case1\"\n", - "example1_characteristics[\"example\"] = \"example1\"\n", - "example1_characteristics[\"results_dir\"] = r\"../../../../Outlet_tests/\"\n", - "example1_characteristics[\"show_progress\"] = False\n", - "example1_characteristics[\"show_results\"] = True\n", - "\n", - "# solver settings\n", - "example1_characteristics[\"saveat\"] = 60" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/03_test_outlets.py b/src/peilbeheerst_model/03_test_outlets.py new file mode 100644 index 0000000..6315dfc --- /dev/null +++ b/src/peilbeheerst_model/03_test_outlets.py @@ -0,0 +1,161 @@ +import sys + +from outlets import case1, case2 + +sys.path.append("peilbeheerst_model") + + +# # Case 1 + +# ### Example 1: boundary and basin levels on target + + +case1_example1 = case1("case1_example1") +case1_example1.create_model() + + +# ### Example 2: boundary levels below target + + +case1_example2 = case1("case1_example2") +case1_example2.create_model() + + +# ### Example 3: boundary levels on target, initial state below target + + +case1_example3 = case1("case1_example3") +case1_example3.create_model() + + +# ### Example 4: boundary levels on target, initial state above target + + +case1_example1 = case1("case1_example4") +case1_example1.create_model() + + +# # Case 2 + +# ### Example 1: boundary and basin levels on target + + +# first, load in the simple model of case 1. Copy it. 
+case2_example1 = case1("case2_example1")
+case2_base_model = case2_example1.create_model(copy=True)
+
+# then, change it to a case 2 category
+case2_example1 = case2("case2_example1", model=case2_base_model)
+case2_example1.create_model()
+
+
+# ### Example 2: boundary and basins below target, third basin above
+# The third basins should fill up the others, or else be pumped to target level by the second pump.
+
+
+# first, load in the simple model of case 1. Copy it.
+case2_example1 = case1("case2_example2")
+case2_base_model = case2_example1.create_model(copy=True)
+
+case2_example2 = case2("case2_example2", model=case2_base_model)
+case2_example2.create_model()
+
+
+# ### Example 3: boundary and basins below target, third basin above, pump rate of third peilgebied set to 0.
+# Quite similar to case 2 example 2, except that the pump rate of the first pump is set to a low rate, so a rising water level is expected in the other basins. The third basins should fill up the others.
+
+
+# first, load in the simple model of case 1. Copy it.
+case2_example1 = case1("case2_example3")
+case2_base_model = case2_example1.create_model(copy=True)
+
+# implement the second model
+case2_example3 = case2("case2_example3", model=case2_base_model)
+case2_example3.create_model()
+
+
+# ### Example 4: low target level in third basin, results in incorrect flow direction
+#
+# The water level in each basin, including the boundaries, is too low. No water should be flowing. However, only the initial level is higher than the target level of the third basin. Water should only flow from the third basin to the last level boundary.
+
+
+# first, load in the simple model of case 1. 
+case2_example4 = case1("case2_example4") +case2_base_model = case2_example4.create_model(copy=True) + +# implement the second model +case2_example4 = case2("case2_example4", model=case2_base_model) +case2_example4.create_model() + +# Volgende case zou kunnen zijn dat er tussen twee peilgebieden (met wel of niet andere target levels) er outlets zijn, die van de een naar de ander gaan, en andersom. Maar in principe verwacht ik daar geen gekke situaties: de outlet laat alleen water stromen als dit onder vrij verval kan, en pompt geen water omhoog. Het enige wat wel gek zou kunnen worden, is als beide peilen rond hetzelfde niveau komen. Dan zou het water de ene tijdstap van links naar rechts kunnen stromen, en de andere momenten de andere kant op. Ik kan me voorstellen dat dit tot instabiliteiten leidt. Weet alleen niet zeker of dit gaat optreden bij simpele voorbeelden als hier.Wat wel interessant zou zijn is het toch wel toevoegen van ContinuousControls. Eerst leek dit niet een logische stap, omdat ik wilde dat de outlet zou luisteren naar boven- en benedenstroomse peil. Maar nu doet dat het eigenlijk alleen naar bovenstrooms. + +# Punt van reflectie: is dat uberhaupt wel de goede aanpak? Hoe weet de basin dan dat het water moet doorlaten? Je kan dit doen door de crest level een stukje lager te zetten dan streefpeil, maar dat houdt dan wel in dat er ALTIJD water stroomt. Qua doorspoeling is dat opzich nog zo gek niet, maar het is niet de meest chique manier. Ook heb je hier dan weinig controle op, wat je misschien wel wil hebben. +# Conclusie(?): de discrete controls moeten OOK gaan luisteren naar benedenstroomse peil. Dit toch wel doen aan de hand van de vier verschillende opties, afhankelijk wat de streefpeil van peilgebied 1 en peilgebied 2 is. + +# Om instabiliteiten tegen te gaan is het wellicht goed om de grenzen niet precies op streefpeil te zetten, maar juist met een bepaalde afwijking. 
De afwijking om water door te mogen voeren van peilgebied 1 naar peilgebied2 moet kunnen zolang het waterniveau van peilgebied 1 niet 5 cm onder streefpeil zakt. De inlaat vanaf de boezem naar peilgebied 1 moet in principe sturen op exact streefpeil. 1) Hoe verhoudt dit zich tot de min_crest_level en een enkele listen_to_node? + +# De min_crest_level voert met 1 listen_node_id altijd water door. In het geval van hierboven zou water altijd van peilgebied 1 naar peilgebied 2 gaan, terwijl peilgebied 2 wellicht helemaal geen water nodig heeft terwijl de inlaat van de boezem naar peilgebied 1 wel water aan het inlaten is voor peilgebied 2. + + +# 2) Hoe verhoudt de min_crest_level zich met dubbele listen_to_nodes? + +# Min_Crest_level lijkt me bijna overbodig worden. + + +# 3) Stel er komt alleen een min_crest_level op de genoemde 5 cm onder streefpeil van peilgebied 1, dat wordt als het ware een schaduw listen node. Dan hoeft de outlet alleen nog maar te luisteren naar de basin die benedenstrooms ligt? + +# Dat is niet waar, want stel er is een minimum crest level gedefinieerd die 5 cm onder streefpeil ligt van peilgebied 1. Wat als de water stand 2 cm onder streefpeil ligt (dus wel nog boven crest level), en er is geen water nodig in peilgebied 2? --> dan blijft het stromen, terwijl dat niet moet.Conclusie: vorige conclusie is correct. Luisteren naar zowel boven- als benedestrooms.Stappenplan voor AGV: + +# Loopen per DiscreteControl lijkt mij geen goed idee. 
+ + +# # Thrashbin + + +example1_characteristics = {} + + +# solver +example1_characteristics["starttime"] = "2024-01-01 00:00:00" +example1_characteristics["endtime"] = "2024-01-03 00:00:00" +example1_characteristics["saveat"] = 60 + +# boezem settings (LevelBoundary) +example1_characteristics["boezem1_level"] = 3 +example1_characteristics["boezem2_level"] = 3 + +# peilgebied settings (Basins) +example1_characteristics["basin1_profile_area"] = [0.01, 10000.0] +example1_characteristics["basin1_profile_level"] = [1.0, 5.0] +example1_characteristics["basin1_initial_level"] = [3] +example1_characteristics["basin1_target_level"] = [2] + + +example1_characteristics["basin2_profile_area"] = [0.01, 10000.0] +example1_characteristics["basin2_profile_level"] = [0.0, 5.0] +example1_characteristics["basin2_initial_level"] = [2] +example1_characteristics["basin2_target_level"] = [1] + +example1_characteristics["evaporation"] = 5 # mm/day, will be converted later to m/s +example1_characteristics["precipitation"] = 5 # mm/day, will be converted later to m/s + +# connection node settings (Outlets, Pumpts) +example1_characteristics["outlet1_flow_rate"] = 0.010 +example1_characteristics["outlet1_min_crest_level"] = 2.90 + +example1_characteristics["outlet2_flow_rate"] = 0.010 +example1_characteristics["outlet2_min_crest_level"] = 1.90 + +example1_characteristics["pump_flow_rate"] = 10 / 60 # [x] m3 / minute + +# general settings +example1_characteristics["plot"] = True +example1_characteristics["crs"] = "EPSG:4326" +example1_characteristics["case"] = "case1" +example1_characteristics["example"] = "example1" +example1_characteristics["results_dir"] = r"../../../../Outlet_tests/" +example1_characteristics["show_progress"] = False +example1_characteristics["show_results"] = True + +# solver settings +example1_characteristics["saveat"] = 60 diff --git a/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb 
b/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb deleted file mode 100644 index 3a3c832..0000000 --- a/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.ipynb +++ /dev/null @@ -1,572 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import datetime\n", - "import os\n", - "import pathlib\n", - "import warnings\n", - "\n", - "import load_ribasim # noqa: F401\n", - "import pandas as pd\n", - "import ribasim\n", - "import ribasim.nodes\n", - "\n", - "import peilbeheerst_model.ribasim_parametrization as ribasim_param\n", - "from peilbeheerst_model.add_storage_basins import AddStorageBasins\n", - "from peilbeheerst_model.controle_output import *\n", - "from peilbeheerst_model.ribasim_feedback_processor import RibasimFeedbackProcessor\n", - "\n", - "%reload_ext autoreload\n", - "%autoreload 2\n", - "warnings.filterwarnings(\"ignore\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define variables and model" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Set Config" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"AmstelGooienVecht\"\n", - "work_dir = pathlib.Path(f\"../../../../../Ribasim_updated_models/{waterschap}/modellen/{waterschap}_parametrized\")\n", - "ribasim_gpkg = work_dir.joinpath(\"database.gpkg\")\n", - "path_ribasim_toml = work_dir.joinpath(\"ribasim.toml\")\n", - "output_dir = work_dir.joinpath(\"results\")\n", - "\n", - "# Basin area percentage\n", - "regular_percentage = 10\n", - "boezem_percentage = 90\n", - "unknown_streefpeil = (\n", - " 0.00012345 # we need a streefpeil to create the profiles, Q(h)-relations, and af- and aanslag peil for pumps\n", - ")\n", - "\n", - "# Forcing settings\n", - "start_time = \"2024-01-01\"\n", - "timestep_size = \"d\"\n", - "timesteps = 2\n", - 
"delta_crest_level = 0.1 # delta waterlevel of boezem compared to streefpeil till no water can flow through an outlet\n", - "\n", - "default_level = -0.42 # default LevelBoundary level" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Process the feedback form" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "name = \"Ron Bruijns (HKV)\"\n", - "versie = \"2024_10_5\"\n", - "\n", - "feedback_excel = pathlib.Path(f\"../../../../../Ribasim_feedback/V1_formulieren/feedback_formulier_{waterschap}.xlsx\")\n", - "feedback_excel_processed = (\n", - " f\"../../../../..//Ribasim_feedback/V1_formulieren_verwerkt/feedback_formulier_{waterschap}_JA_processed.xlsx\"\n", - ")\n", - "\n", - "ribasim_toml = f\"../../../../../Ribasim_base_models/{waterschap}_boezemmodel_{versie}/ribasim.toml\"\n", - "output_folder = work_dir # f\"../../../../../Ribasim_updated_models/{waterschap}\"\n", - "\n", - "processor = RibasimFeedbackProcessor(\n", - " name, waterschap, versie, feedback_excel, ribasim_toml, output_folder, feedback_excel_processed\n", - ")\n", - "processor.run()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Load model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Load Ribasim model\n", - "with warnings.catch_warnings():\n", - " warnings.simplefilter(action=\"ignore\", category=FutureWarning)\n", - " ribasim_model = ribasim.Model(filepath=path_ribasim_toml)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Parameterization" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Nodes" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Basin (characteristics)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - 
"ribasim_param.validate_basin_area(ribasim_model)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# remove the basins of above in the feedback form" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Model specific tweaks" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "new_node_id = max(ribasim_model.edge.df.from_node_id.max(), ribasim_model.edge.df.to_node_id.max()) + 1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# change unknown streefpeilen to a default streefpeil\n", - "ribasim_model.basin.area.df.loc[\n", - " ribasim_model.basin.area.df[\"meta_streefpeil\"] == \"Onbekend streefpeil\", \"meta_streefpeil\"\n", - "] = str(unknown_streefpeil)\n", - "ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df[\"meta_streefpeil\"] == -9.999, \"meta_streefpeil\"] = str(\n", - " unknown_streefpeil\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_model.basin.area.df[\"meta_streefpeil\"] = ribasim_model.basin.area.df[\"meta_streefpeil\"].astype(float)\n", - "ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df.index == 195, \"meta_streefpeil\"] = -2.45\n", - "ribasim_model.basin.state.df.loc[ribasim_model.basin.state.df.index == 195, \"level\"] = -2.45" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "assert not pd.isnull(ribasim_model.basin.area.df.meta_streefpeil).any()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Implement standard profile and a storage basin" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Insert standard profiles to each basin. 
These are [depth_profiles] meter deep, defined from the streefpeil\n", - "ribasim_param.insert_standard_profile(\n", - " ribasim_model,\n", - " unknown_streefpeil=unknown_streefpeil,\n", - " regular_percentage=regular_percentage,\n", - " boezem_percentage=boezem_percentage,\n", - " depth_profile=2,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# remove after the feedback forms have been fixed\n", - "ribasim_model.basin.profile.df.loc[ribasim_model.basin.profile.df.index == 559, \"area\"] = 1\n", - "ribasim_model.basin.profile.df.loc[ribasim_model.basin.profile.df.index == 560, \"area\"] = 2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "add_storage_basins = AddStorageBasins(\n", - " ribasim_model=ribasim_model, exclude_hoofdwater=True, additional_basins_to_exclude=[]\n", - ")\n", - "\n", - "add_storage_basins.create_bergende_basins()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Basin (forcing)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Set static forcing\n", - "forcing_dict = {\n", - " \"precipitation\": ribasim_param.convert_mm_day_to_m_sec(10),\n", - " \"potential_evaporation\": ribasim_param.convert_mm_day_to_m_sec(0),\n", - " \"drainage\": ribasim_param.convert_mm_day_to_m_sec(0),\n", - " \"infiltration\": ribasim_param.convert_mm_day_to_m_sec(0),\n", - " # 'urban_runoff': ribasim_param.convert_mm_day_to_m_sec(0),\n", - "}\n", - "\n", - "ribasim_param.set_static_forcing(timesteps, timestep_size, start_time, forcing_dict, ribasim_model)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Pumps" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Set pump capacity for each pump\n", - 
"ribasim_model.pump.static.df[\"flow_rate\"] = 0.16667 # 10 kuub per minuut" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Convert all boundary nodes to LevelBoundaries" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_param.Terminals_to_LevelBoundaries(ribasim_model=ribasim_model, default_level=default_level) # clean\n", - "ribasim_param.FlowBoundaries_to_LevelBoundaries(ribasim_model=ribasim_model, default_level=default_level)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Add Outlet" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_param.add_outlets(ribasim_model, delta_crest_level=0.10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Add control, based on the meta_categorie" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_param.identify_node_meta_categorie(ribasim_model)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_param.find_upstream_downstream_target_levels(ribasim_model, node=\"outlet\")\n", - "ribasim_param.find_upstream_downstream_target_levels(ribasim_model, node=\"pump\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# ribasim_param.add_discrete_control(ribasim_model, waterschap, default_level)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_param.determine_min_upstream_max_downstream_levels(ribasim_model, waterschap)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Manning Resistance" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - 
"source": [ - "# there is a MR without geometry and without edges for some reason\n", - "ribasim_model.manning_resistance.node.df = ribasim_model.manning_resistance.node.df.dropna(subset=\"geometry\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Last formating of the tables" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# only retain node_id's which are present in the .node table\n", - "ribasim_param.clean_tables(ribasim_model)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_model.edge.df[\"fid\"] = ribasim_model.edge.df.index.copy()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Set numerical settings" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ribasim_model.use_validation = True" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Write model output\n", - "# ribasim_param.index_reset(ribasim_model)\n", - "ribasim_model.starttime = datetime.datetime(2024, 1, 1)\n", - "ribasim_model.endtime = datetime.datetime(2025, 1, 1)\n", - "ribasim_model.solver.saveat = 3600\n", - "ribasim_param.write_ribasim_model_Zdrive(ribasim_model, path_ribasim_toml)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Run Model" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Iterate over tabulated rating curves" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# try:\n", - "# ribasim_param.iterate_TRC(\n", - "# ribasim_param=ribasim_param,\n", - "# allowed_tolerance=0.02,\n", - "# max_iter=1,\n", - "# expected_difference=0.1,\n", - "# max_adjustment=0.25,\n", - "# cmd=[\"ribasim\", path_ribasim_toml],\n", - "# 
output_dir=output_dir,\n", - "# path_ribasim_toml=path_ribasim_toml,\n", - "# )\n", - "\n", - "# except Exception:\n", - "# logging.error(\"The model was not able to run. Log file:\")\n", - "# log_file_path = os.path.join(output_dir, \"ribasim.log\") # Update with the correct path to your log file\n", - "# try:\n", - "# with open(log_file_path) as log_file:\n", - "# log_content = log_file.read()\n", - "# print(log_content)\n", - "# except Exception as log_exception:\n", - "# logging.error(f\"Could not read the log file: {log_exception}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Write model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# control_dict = Control(work_dir = work_dir).run_all()\n", - "ribasim_param.write_ribasim_model_GoodCloud(\n", - " ribasim_model=ribasim_model,\n", - " path_ribasim_toml=path_ribasim_toml,\n", - " waterschap=waterschap,\n", - " modeltype=\"boezemmodel\",\n", - " include_results=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Open Output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "df_basin = pd.read_feather(os.path.join(output_dir, \"basin.arrow\"))\n", - "df_basin" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Add discrete control nodes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Add discrete control nodes and control edges\n", - "# ribasim_param.add_discrete_control_nodes(ribasim_model)\n", - "\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": 
"python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.7" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.py b/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.py new file mode 100644 index 0000000..75ab1f2 --- /dev/null +++ b/src/peilbeheerst_model/Parametrize/AmstelGooienVecht_parametrize.py @@ -0,0 +1,272 @@ +import datetime +import os +import pathlib +import warnings + +import load_ribasim # noqa: F401 +import pandas as pd +import ribasim +import ribasim.nodes + +import peilbeheerst_model.ribasim_parametrization as ribasim_param +from peilbeheerst_model.add_storage_basins import AddStorageBasins +from peilbeheerst_model.controle_output import * +from peilbeheerst_model.ribasim_feedback_processor import RibasimFeedbackProcessor + +get_ipython().run_line_magic("reload_ext", "autoreload") + +warnings.filterwarnings("ignore") + + +# ## Define variables and model + +# #### Set Config + + +waterschap = "AmstelGooienVecht" +work_dir = pathlib.Path(f"../../../../../Ribasim_updated_models/{waterschap}/modellen/{waterschap}_parametrized") +ribasim_gpkg = work_dir.joinpath("database.gpkg") +path_ribasim_toml = work_dir.joinpath("ribasim.toml") +output_dir = work_dir.joinpath("results") + +# Basin area percentage +regular_percentage = 10 +boezem_percentage = 90 +unknown_streefpeil = ( + 0.00012345 # we need a streefpeil to create the profiles, Q(h)-relations, and af- and aanslag peil for pumps +) + +# Forcing settings +start_time = "2024-01-01" +timestep_size = "d" +timesteps = 2 +delta_crest_level = 0.1 # delta waterlevel of boezem compared to streefpeil till no water can flow through an outlet + +default_level = -0.42 # default LevelBoundary level + + +# ## Process the feedback form + + +name = "Ron Bruijns (HKV)" +versie = "2024_10_5" + +feedback_excel = 
pathlib.Path(f"../../../../../Ribasim_feedback/V1_formulieren/feedback_formulier_{waterschap}.xlsx") +feedback_excel_processed = ( + f"../../../../..//Ribasim_feedback/V1_formulieren_verwerkt/feedback_formulier_{waterschap}_JA_processed.xlsx" +) + +ribasim_toml = f"../../../../../Ribasim_base_models/{waterschap}_boezemmodel_{versie}/ribasim.toml" +output_folder = work_dir # f"../../../../../Ribasim_updated_models/{waterschap}" + +processor = RibasimFeedbackProcessor( + name, waterschap, versie, feedback_excel, ribasim_toml, output_folder, feedback_excel_processed +) +processor.run() + + +# #### Load model + + +# Load Ribasim model +with warnings.catch_warnings(): + warnings.simplefilter(action="ignore", category=FutureWarning) + ribasim_model = ribasim.Model(filepath=path_ribasim_toml) + + +# # Parameterization + +# ## Nodes + +# ### Basin (characteristics) + + +ribasim_param.validate_basin_area(ribasim_model) + + +# remove the basins of above in the feedback form + + +# ## Model specific tweaks + + +new_node_id = max(ribasim_model.edge.df.from_node_id.max(), ribasim_model.edge.df.to_node_id.max()) + 1 + + +# change unknown streefpeilen to a default streefpeil +ribasim_model.basin.area.df.loc[ + ribasim_model.basin.area.df["meta_streefpeil"] == "Onbekend streefpeil", "meta_streefpeil" +] = str(unknown_streefpeil) +ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df["meta_streefpeil"] == -9.999, "meta_streefpeil"] = str( + unknown_streefpeil +) + + +ribasim_model.basin.area.df["meta_streefpeil"] = ribasim_model.basin.area.df["meta_streefpeil"].astype(float) +ribasim_model.basin.area.df.loc[ribasim_model.basin.area.df.index == 195, "meta_streefpeil"] = -2.45 +ribasim_model.basin.state.df.loc[ribasim_model.basin.state.df.index == 195, "level"] = -2.45 + + +assert not pd.isnull(ribasim_model.basin.area.df.meta_streefpeil).any() + + +# ## Implement standard profile and a storage basin + + +# Insert standard profiles to each basin. 
These are [depth_profiles] meter deep, defined from the streefpeil +ribasim_param.insert_standard_profile( + ribasim_model, + unknown_streefpeil=unknown_streefpeil, + regular_percentage=regular_percentage, + boezem_percentage=boezem_percentage, + depth_profile=2, +) + + +# remove after the feedback forms have been fixed +ribasim_model.basin.profile.df.loc[ribasim_model.basin.profile.df.index == 559, "area"] = 1 +ribasim_model.basin.profile.df.loc[ribasim_model.basin.profile.df.index == 560, "area"] = 2 + + +add_storage_basins = AddStorageBasins( + ribasim_model=ribasim_model, exclude_hoofdwater=True, additional_basins_to_exclude=[] +) + +add_storage_basins.create_bergende_basins() + + +# ### Basin (forcing) + + +# Set static forcing +forcing_dict = { + "precipitation": ribasim_param.convert_mm_day_to_m_sec(10), + "potential_evaporation": ribasim_param.convert_mm_day_to_m_sec(0), + "drainage": ribasim_param.convert_mm_day_to_m_sec(0), + "infiltration": ribasim_param.convert_mm_day_to_m_sec(0), + # 'urban_runoff': ribasim_param.convert_mm_day_to_m_sec(0), +} + +ribasim_param.set_static_forcing(timesteps, timestep_size, start_time, forcing_dict, ribasim_model) + + +# ### Pumps + + +# Set pump capacity for each pump +ribasim_model.pump.static.df["flow_rate"] = 0.16667 # 10 kuub per minuut + + +# ### Convert all boundary nodes to LevelBoundaries + + +ribasim_param.Terminals_to_LevelBoundaries(ribasim_model=ribasim_model, default_level=default_level) # clean +ribasim_param.FlowBoundaries_to_LevelBoundaries(ribasim_model=ribasim_model, default_level=default_level) + + +# ### Add Outlet + + +ribasim_param.add_outlets(ribasim_model, delta_crest_level=0.10) + + +# ## Add control, based on the meta_categorie + + +ribasim_param.identify_node_meta_categorie(ribasim_model) + + +ribasim_param.find_upstream_downstream_target_levels(ribasim_model, node="outlet") +ribasim_param.find_upstream_downstream_target_levels(ribasim_model, node="pump") + + +# 
ribasim_param.add_discrete_control(ribasim_model, waterschap, default_level) + + +ribasim_param.determine_min_upstream_max_downstream_levels(ribasim_model, waterschap) + + +# ### Manning Resistance + + +# there is a MR without geometry and without edges for some reason +ribasim_model.manning_resistance.node.df = ribasim_model.manning_resistance.node.df.dropna(subset="geometry") + + +# ## Last formating of the tables + + +# only retain node_id's which are present in the .node table +ribasim_param.clean_tables(ribasim_model) + + +ribasim_model.edge.df["fid"] = ribasim_model.edge.df.index.copy() + + +# # Set numerical settings + + +ribasim_model.use_validation = True + + +# Write model output +# ribasim_param.index_reset(ribasim_model) +ribasim_model.starttime = datetime.datetime(2024, 1, 1) +ribasim_model.endtime = datetime.datetime(2025, 1, 1) +ribasim_model.solver.saveat = 3600 +ribasim_param.write_ribasim_model_Zdrive(ribasim_model, path_ribasim_toml) + + +# ## Run Model + +# ## Iterate over tabulated rating curves + + +# try: +# ribasim_param.iterate_TRC( +# ribasim_param=ribasim_param, +# allowed_tolerance=0.02, +# max_iter=1, +# expected_difference=0.1, +# max_adjustment=0.25, +# cmd=["ribasim", path_ribasim_toml], +# output_dir=output_dir, +# path_ribasim_toml=path_ribasim_toml, +# ) + +# except Exception: +# logging.error("The model was not able to run. 
Log file:") +# log_file_path = os.path.join(output_dir, "ribasim.log") # Update with the correct path to your log file +# try: +# with open(log_file_path) as log_file: +# log_content = log_file.read() +# print(log_content) +# except Exception as log_exception: +# logging.error(f"Could not read the log file: {log_exception}") + + +# # Write model + + +# control_dict = Control(work_dir = work_dir).run_all() +ribasim_param.write_ribasim_model_GoodCloud( + ribasim_model=ribasim_model, + path_ribasim_toml=path_ribasim_toml, + waterschap=waterschap, + modeltype="boezemmodel", + include_results=True, +) + + +# ## Open Output + + +df_basin = pd.read_feather(os.path.join(output_dir, "basin.arrow")) +df_basin + + +# ### Add discrete control nodes + + +# Add discrete control nodes and control edges +# ribasim_param.add_discrete_control_nodes(ribasim_model) diff --git a/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb b/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb deleted file mode 100644 index 8e8a4c9..0000000 --- a/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.ipynb +++ /dev/null @@ -1,584 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Holandse Delta\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import 
dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"Hollandse Delta\"\n", - "\n", - "# Define crossings file path\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. 
explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per 
line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " 
points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if (dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if 
p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " # print(row)\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - " # print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - " 
print(len(gdf_object))\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = 
nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in 
graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate 
over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths 
{index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", 
layer=key, driver=\"GPKG\"\n", - " )\n", - "\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_out.to_file(f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/Hollandse_Delta/{waterschap}_shortest_path2.gpkg\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py b/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py new file mode 100644 index 0000000..f982ef6 --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py @@ -0,0 +1,458 @@ +# # 
Hollandse Delta
+#
+# ### Create shortest_path RHWS network
+#
+# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55
+#
+
+
+import fiona
+import geopandas as gpd
+import matplotlib.pyplot as plt
+import networkx as nx
+import numpy as np
+import pandas as pd
+import shapely
+import tqdm.auto as tqdm
+from shapely.geometry import LineString, MultiLineString, Point
+from shapely.ops import split
+from shapely.wkt import dumps
+
+from peilbeheerst_model import waterschap_data
+
+# ### Load Data
+
+
+waterschap = "Hollandse Delta"
+
+# Define crossings file path
+data_path_str = waterschap_data[waterschap]["init"]["output_path"]
+data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}"
+
+# Load crossings file
+DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}
+
+
+# ### Select rhws
+
+
+# Select RHWS peilgebied & calculate representative point
+gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy()
+gdf_rhws["representative_point"] = gdf_rhws.representative_point()
+
+# Apply aggregation level based filter
+gdf_cross = (
+    DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy()
+)  # filter aggregation level
+
+
+# ### Define functions
+# 1. splitting functions
+# 2. connect graphs functions
+# 3. 
explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + 
geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + 
gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + # print(row) + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) + ].copy() + # print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + print(len(gdf_object)) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. 
+ startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + 
print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # 
Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) + + +gdf_out.to_file(f"/DATAFOLDER/projects/4750_30/Data_shortest_path/Hollandse_Delta/{waterschap}_shortest_path2.gpkg") diff --git a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb 
b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb deleted file mode 100644 index 33901f7..0000000 --- a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.ipynb +++ /dev/null @@ -1,642 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# HHSK\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"HHSK\"\n", - "\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, 
- "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, 
match in precise_matches.iterrows():\n", - " if row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = 
smaller_gdfs[si].sindex.nearest(\n", - " largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = 
gdf.iloc[idx0].distance(p0)\n", - " if (dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " print(index)\n", - "\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " 
(gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - " print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & 
endpoints RHWS network ###\n", - " # Find the closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for 
comp in components if comp != largest_component] # not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " 
gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except 
nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", 
crs=gdf_rhws_single.crs).drop(\n", - " columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], 
- "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py new file mode 100644 index 0000000..381c6e3 --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py @@ -0,0 +1,457 @@ +# # HHSK +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Load Data + + +waterschap = "HHSK" + + +# Define 
crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" +data_path_str = waterschap_data[waterschap]["init"]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# ### Select rhws + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# Apply aggregation level based filter +gdf_cross = ( + DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if 
row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + 
+ +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + 
coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + print(index) + + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = 
gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often 
consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], 
shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = 
gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + 
f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) diff --git a/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb b/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb deleted file mode 100644 index 0bc9400..0000000 --- a/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.ipynb +++ /dev/null @@ -1,576 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# HHNK\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"HHNK\"\n", - "\n", - "# Define crossings file path\n", - "path2json = 
\"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. 
explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per 
line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " 
points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if (dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if 
p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " # print(row)\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - " # print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - " 
print(len(gdf_object))\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = 
nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in 
graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate 
over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - " print(gdf_cross_single)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " 
ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " 
f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - "\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py b/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py new file mode 100644 index 0000000..b75ac29 --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py @@ -0,0 +1,457 @@ +# # HHNK +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + 
+import fiona
+import geopandas as gpd
+import matplotlib.pyplot as plt
+import networkx as nx
+import numpy as np
+import pandas as pd
+import shapely
+import tqdm.auto as tqdm
+from shapely.geometry import LineString, MultiLineString, Point
+from shapely.ops import split
+from shapely.wkt import dumps
+
+from peilbeheerst_model import waterschap_data
+
+# ### Load Data
+
+
+waterschap = "HHNK"
+
+# Define crossings file path
+path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json"
+data_path_str = waterschap_data[waterschap]["init"]["output_path"]
+data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}"
+
+# Load crossings file
+DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}
+
+
+# ### Select rhws
+
+
+# Select RHWS peilgebied & calculate representative point
+gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy()
+gdf_rhws["representative_point"] = gdf_rhws.representative_point()
+
+# Apply aggregation level based filter
+gdf_cross = (
+    DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy()
+)  # filter aggregation level
+
+
+# ### Define functions
+# 1. splitting functions
+# 2. connect graphs functions
+# 3. 
explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + 
geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + 
gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + # print(row) + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) + ].copy() + # print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + print(len(gdf_object)) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. 
+        startpoints, distances = nodes_gdf.sindex.nearest(
+            gdf_cross_single.geometry, return_all=False, return_distance=True
+        )
+        startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values
+
+        gdf_cross_single["node_id"] = startpoints
+        gdf_cross_single["node_id_distance"] = distances
+
+        # find the node_id closest to the RHWS representative point (end point)
+        # Exclude the points which are already used as starting points
+        df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()
+        endpoint, distance = df_endpoint.sindex.nearest(
+            rhws.representative_point, return_all=False, return_distance=True
+        )
+
+        endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]
+        gdf_rhws_single["node_id"] = endpoint
+        gdf_rhws_single["node_id_distance"] = distance
+
+        ### Create networkx graph ###
+        graph = nx.Graph()
+
+        # add nodes in boezem
+        for node_id, group in nodes_gdf.groupby("node_id"):
+            graph.add_node(node_id, geometry=group.geometry.iat[0])
+
+        # add edges
+        line_lookup = gdf_object.geometry
+        for idx0, group in nodes_gdf.groupby(level=0):
+            node_from, node_to = group.node_id
+            line_geom = gdf_object.geometry.at[idx0]
+            graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)
+
+        ### Find disruptions Graph ###
+        # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes
+        # Get lists of components (sub-graph)
+        print("Find distruptions in Graph")
+        components = list(nx.connected_components(graph))
+        largest_component = max(components, key=len)
+        smaller_components = [comp for comp in components if comp != largest_component]  # not used anymore
+        print(len(smaller_components), end="\r")
+
+        while True:
+            components = list(nx.connected_components(graph))
+            largest_component = max(components, key=len)
+            smaller_components = [comp for comp in components if comp != largest_component]
+
+            if not smaller_components:  # If there are no smaller components left, break the loop
+                break
+
+            
print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # 
Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + print(gdf_cross_single) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest 
paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) diff --git a/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb b/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb deleted file mode 100644 index b8b102b..0000000 --- 
a/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.ipynb +++ /dev/null @@ -1,569 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Delfland\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"Delfland\"\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate representative point\n", - "gdf_rhws = 
DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if row.geometry.intersects(match.geometry):\n", - " intersection 
= row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " 
)\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if (dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " 
geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " print(index)\n", - "\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - " print(\"Clip 
Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from the crossings.\n", - " # Keep only points which are 
(almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - 
"\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - 
" )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " 
gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = 
gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py 
b/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py new file mode 100644 index 0000000..8521b8b --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py @@ -0,0 +1,456 @@ +# # Delfland +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Load Data + + +waterschap = "Delfland" + +# Define crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" +data_path_str = waterschap_data[waterschap]["init"]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# ### Select rhws + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# Apply aggregation level based filter +gdf_cross = ( + DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. 
explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + 
geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + 
gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + print(index) + + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) + ].copy() + print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. 
+ startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + 
print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # 
Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) diff --git a/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb b/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb deleted file mode 100644 index 2fe53cc..0000000 --- 
a/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.ipynb +++ /dev/null @@ -1,611 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Scheldestromen\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"Scheldestromen\"\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - 
"gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_rhws.plot()" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if 
row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " 
largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if 
(dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " print(index)\n", - "\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | 
(gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - " print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the 
closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] 
# not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == 
startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " 
not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " 
columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": 
"code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py b/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py new file mode 100644 index 0000000..288be37 --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py @@ -0,0 +1,456 @@ +# # Scheldestromen +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Load Data + + +waterschap = "Scheldestromen" + +# Define crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" +data_path_str = waterschap_data[waterschap]["init"]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = 
DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# Apply aggregation level based filter +gdf_cross = ( + DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +gdf_rhws.plot() + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + 
# Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def 
explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + 
print(index) + + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in 
nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = 
list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + 
geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, 
color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", 
driver="GPKG" +) diff --git a/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb b/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb deleted file mode 100644 index 1539e39..0000000 --- a/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.ipynb +++ /dev/null @@ -1,581 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Zuiderzeeland\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"Zuiderzeeland\"\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "print(data_path)\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "DATA.keys()" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " 
possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", 
- " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, 
p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if (dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " # print(row)\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " 
gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - " # print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - " print(len(gdf_object))\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", 
- "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple 
smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " 
shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", 
- " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " 
shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - "\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# message = pd.DataFrame()\n", - "# message['text'] = np.nan#'Zuiderzeeland is klaar'\n", - "# message.to_csv('Zuiderzeeland_is_klaar.txt')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { 
- "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py b/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py new file mode 100644 index 0000000..a128e10 --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py @@ -0,0 +1,461 @@ +# # Zuiderzeeland +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Load Data + + +waterschap = "Zuiderzeeland" + +# Define crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" +data_path_str = waterschap_data[waterschap]["init"]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" +print(data_path) +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# 
Apply aggregation level based filter +gdf_cross = ( + DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +DATA.keys() + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not 
was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for 
_, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + # print(row) + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = 
gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + # print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + print(len(gdf_object)) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network 
### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != 
largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # 
Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, 
color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) + + +# message = pd.DataFrame() +# message['text'] = np.nan#'Zuiderzeeland is klaar' +# message.to_csv('Zuiderzeeland_is_klaar.txt') diff 
--git a/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb b/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb deleted file mode 100644 index 41b9297..0000000 --- a/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.ipynb +++ /dev/null @@ -1,591 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# WSRL\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"WSRL\"\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": 
"code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = 
possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", 
- " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = 
gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if (dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " print(index)\n", - "\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = 
gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - " print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, 
\"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " 
largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " 
edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, 
\"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " 
objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - 
"kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py b/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py new file mode 100644 index 0000000..2a9d15e --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py @@ -0,0 +1,456 @@ +# # WSRL +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Load Data + + +waterschap = "WSRL" + +# Define crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" +data_path_str = waterschap_data[waterschap]["init"]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# ### Select rhws + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# Apply aggregation level based filter 
+gdf_cross = ( + DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original 
line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + 
segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + print(index) + + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", 
crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. 
+ # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there 
are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in 
not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + 
plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) diff --git a/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb b/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb deleted file mode 
100644 index a43b06c..0000000 --- a/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.ipynb +++ /dev/null @@ -1,592 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Wetterskip\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"Wetterskip\"\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & 
calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if 
row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " 
largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if 
(dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " print(index)\n", - "\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to 
== globalid_value)\n", - " ].copy()\n", - " print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from 
the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " 
print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, 
\"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " 
not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " 
columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": 
"conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py b/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py new file mode 100644 index 0000000..6a1d9bf --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py @@ -0,0 +1,457 @@ +# # Wetterskip +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Load Data + + +waterschap = "Wetterskip" + +# Define crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" +data_path_str = waterschap_data[waterschap]["init"]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + + +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# ### Select rhws + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# Apply aggregation level based filter +gdf_cross = ( + 
DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + 
split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + 
segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + print(index) + + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", 
crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. 
+ # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there 
are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in 
not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + 
plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) diff --git a/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb b/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb deleted file mode 100644 
index 358c521..0000000 --- a/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.ipynb +++ /dev/null @@ -1,594 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Rijnland\n", - "\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps\n", - "\n", - "from peilbeheerst_model import waterschap_data" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"Rijnland\"\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = waterschap_data[waterschap][\"init\"][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied & calculate 
representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if 
row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " 
largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if 
(dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " print(index)\n", - "\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to 
== globalid_value)\n", - " ].copy()\n", - " print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 50 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from 
the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " 
print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, 
\"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " 
not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " 
columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": 
"conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py b/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py new file mode 100644 index 0000000..8ac7aa6 --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py @@ -0,0 +1,460 @@ +# # Rijnland +# +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Load Data + + +waterschap = "Rijnland" + +# Define crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" +data_path_str = waterschap_data[waterschap]["init"]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# ### Select rhws + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# Apply aggregation level based filter +gdf_cross = ( + 
DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + 
split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + 
segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + print(index) + + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", 
crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. 
+ # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there 
are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in 
not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + 
plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + except Exception as e: + print(e) + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) + + +1 diff --git a/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.ipynb b/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.ipynb deleted file mode 100644 
index d3481c9..0000000 --- a/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.ipynb +++ /dev/null @@ -1,909 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# AGV\n", - "\n", - "\n", - "### Create shortest_path RHWS network\n", - "\n", - "Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "import fiona\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import tqdm.auto as tqdm\n", - "from shapely.geometry import LineString, MultiLineString, Point\n", - "from shapely.ops import split\n", - "from shapely.wkt import dumps" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "### Load Data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap1 = \"AmstelGooienVecht\"\n", - "waterschap2 = \"AGV\"\n", - "waterschap = waterschap1\n", - "\n", - "# Define crossings file path\n", - "path2json = \"/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json\"\n", - "data_path_str = pd.read_json(path2json).loc[\"init\"][waterschap1][\"output_path\"]\n", - "data_path = f\"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}\"\n", - "\n", - "\n", - "# Load crossings file\n", - "DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)}" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": {}, - "source": [ - "### Select rhws" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Select RHWS peilgebeied 
& calculate representative point\n", - "gdf_rhws = DATA[\"peilgebied\"].loc[DATA[\"peilgebied\"][\"peilgebied_cat\"] == 1].copy()\n", - "gdf_rhws[\"representative_point\"] = gdf_rhws.representative_point()\n", - "\n", - "# Apply aggregation level based filter\n", - "gdf_cross = (\n", - " DATA[\"crossings_hydroobject_filtered\"].loc[DATA[\"crossings_hydroobject_filtered\"][\"agg_links_in_use\"]].copy()\n", - ") # filter aggregation level" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Define functions\n", - "1. splitting functions\n", - "2. connect graphs functions\n", - "3. explode nodes functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "def split_line_at_point(line, point):\n", - " buff = point.buffer(1e-4) # Small buffer around the point\n", - " split_result = split(line, buff)\n", - " if len(split_result.geoms) in [2, 3]:\n", - " # Assume first and last segments are the result, ignore tiny middle segment if exists\n", - " result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]])\n", - " else:\n", - " # Return the original line as a MultiLineString for consistency if no split occurred\n", - " result = MultiLineString([line])\n", - " return result\n", - "\n", - "\n", - "def split_lines_at_intersections(gdf_object):\n", - " split_lines = []\n", - " gdf_object.drop(columns=[\"geometry\"]) # Preserve non-geometry attributes\n", - "\n", - " for idx, row in gdf_object.iterrows():\n", - " was_split = False\n", - "\n", - " # Get potential intersections using spatial index\n", - " possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds))\n", - " possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self\n", - " precise_matches = possible_matches[possible_matches.intersects(row.geometry)]\n", - "\n", - " for match_idx, match in precise_matches.iterrows():\n", - " if 
row.geometry.intersects(match.geometry):\n", - " intersection = row.geometry.intersection(match.geometry)\n", - " if isinstance(intersection, Point):\n", - " # Split the current line at the intersection point\n", - " try:\n", - " split_result = split_line_at_point(row.geometry, intersection)\n", - " for geom in split_result.geoms:\n", - " new_row = row.copy()\n", - " new_row.geometry = geom\n", - " split_lines.append(new_row)\n", - " was_split = True\n", - " except ValueError as e:\n", - " print(f\"Error splitting line: {e}\")\n", - " # Add other intersection types handling if needed\n", - " break # Assumes only one split per line; remove or modify for multiple splits\n", - "\n", - " if not was_split:\n", - " # If the line was not split, include the original line\n", - " split_lines.append(row)\n", - "\n", - " # Create a new GeoDataFrame from the split or original lines\n", - " result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns)\n", - " return result_gdf\n", - "\n", - "\n", - "def component_to_gdf(component, node_geometries):\n", - " geometries = [node_geometries[node] for node in component]\n", - " return gpd.GeoDataFrame(geometry=geometries, index=list(component))\n", - "\n", - "\n", - "def connect_components(graph, node1, node2, node_geometries):\n", - " geom1 = node_geometries[node1]\n", - " geom2 = node_geometries[node2]\n", - " new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]])\n", - " graph.add_edge(node1, node2, geometry=new_edge_geom)\n", - "\n", - "\n", - "def find_closest_component_pair(largest_gdf, smaller_gdfs):\n", - " print(len(smaller_gdfs), end=\"\\r\")\n", - " sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs])\n", - " nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True)\n", - " li, si = nearest_i[:, np.argmin(dist2)]\n", - "\n", - " nearest_idx, dist = smaller_gdfs[si].sindex.nearest(\n", - " 
largest_gdf.geometry.iat[li], return_all=False, return_distance=True\n", - " )\n", - " node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]]\n", - " node_in_largest = largest_gdf.index[li]\n", - " closest_pair_nodes = (node_in_largest, node_in_smaller)\n", - " # print(\"done\")\n", - " return si, closest_pair_nodes\n", - "\n", - "\n", - "def cut_linestring_at_interval(line, interval):\n", - " \"\"\"Cut a LineString into segments of a specified interval.\"\"\"\n", - " # Calculate the number of segments needed\n", - " num_segments = int(np.ceil(line.length / interval))\n", - " if num_segments == 1:\n", - " return [line]\n", - "\n", - " points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)]\n", - " return [LineString([points[i], points[i + 1]]) for i in range(num_segments)]\n", - "\n", - "\n", - "def explode_linestrings(gdf, interval):\n", - " \"\"\"Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.\"\"\"\n", - " segments = []\n", - " for _, row in gdf.iterrows():\n", - " line = row.geometry\n", - " segments.extend(cut_linestring_at_interval(line, interval))\n", - "\n", - " return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs)\n", - "\n", - "\n", - "def connect_linestrings_within_distance(gdf, max_distance=4):\n", - " gdf = gdf.explode(ignore_index=False, index_parts=True)\n", - " gdf[\"geometry\"] = gdf.make_valid()\n", - " gdf[\"geometry\"] = gdf.geometry.apply(shapely.force_2d)\n", - " gdf = gdf[~gdf.is_empty].copy()\n", - "\n", - " change_idx, change_geom = [], []\n", - " for row in tqdm.tqdm(\n", - " gdf.itertuples(),\n", - " total=len(gdf),\n", - " ):\n", - " ps = row.geometry.boundary.geoms\n", - " if len(ps) != 2:\n", - " continue\n", - " p0, p1 = ps\n", - "\n", - " p0_changed, p1_changed = False, False\n", - " idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx0) > 0:\n", - " dist0 = gdf.iloc[idx0].distance(p0)\n", - " if 
(dist0 > 10e-8).any():\n", - " snap_lbl0 = dist0[dist0 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl0]\n", - " p0 = geom.interpolate(geom.project(p0))\n", - " p0_changed = True\n", - "\n", - " idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate=\"intersects\")\n", - " if len(idx1) > 0:\n", - " dist1 = gdf.iloc[idx1].distance(p1)\n", - " if (dist1 > 10e-8).any():\n", - " snap_lbl1 = dist1[dist1 > 10e-8].idxmin()\n", - " geom = gdf.geometry.at[snap_lbl1]\n", - " p1 = geom.interpolate(geom.project(p1))\n", - " p1_changed = True\n", - "\n", - " if p0_changed or p1_changed:\n", - " coords = list(row.geometry.coords)\n", - " if p0_changed:\n", - " coords = list(p0.coords) + coords\n", - " if p1_changed:\n", - " coords = coords + list(p1.coords)\n", - " change_idx.append(row.Index)\n", - " change_geom.append(LineString(coords))\n", - "\n", - " if len(change_idx) > 0:\n", - " gdf.loc[change_idx, \"geometry\"] = change_geom\n", - "\n", - " return gdf" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "# Shortest Path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour=\"blue\"):\n", - " try:\n", - " # print(row)\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - "\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value)\n", - " ].copy()\n", - " # print(\"Clip Crossings/Hydroobjects\")\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - " print(\"Split Hydroobjects at Intersect\")\n", - " # Split lines at intersection\n", - " gdf_object = split_lines_at_intersections(gdf_object)\n", - " print(len(gdf_object))\n", - "\n", - " print(\"Connect Hydroobjects within distance\")\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 5 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the 
closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " print(\"Find distruptions in Graph\")\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] 
# not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - "\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " print(len(smaller_components), end=\"\\r\")\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == 
startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " 
not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap1} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap1}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path[\"geometry\"] = shortest_path.apply(\n", - " lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1\n", - " )\n", - "\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " 
columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap1}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - "\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap2}/{waterschap2}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [] - }, 
- { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "gdf_crossings_out = []\n", - "gdf_rhws = gdf_rhws.reset_index(drop=True)\n", - "\n", - "\n", - "# Loop RHWS polygons\n", - "gdf_crossings_out = []\n", - "\n", - "for index, rhws in gdf_rhws.iterrows():\n", - " try:\n", - " print(index)\n", - " ### Select Crossings/Hydroobjects ###\n", - " print(\"Select Crossings/Hydroobjects\")\n", - " # Single RHWS row as GeoDataFrame\n", - " gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry=\"geometry\", crs=gdf_rhws.crs)\n", - "\n", - " # Select for each boezem polygon the relevant crossings\n", - " globalid_value = gdf_rhws_single.globalid.iloc[0]\n", - " gdf_cross_single = gdf_cross[\n", - " (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value)\n", - " ].copy()\n", - "\n", - " # Select hydroobjects in RHWS polygons\n", - " gdf_object = gpd.clip(DATA[\"hydroobject\"], gdf_rhws_single)\n", - " gdf_object = gdf_object.reset_index(drop=True)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " # Split lines at intersection\n", - " 
gdf_object = split_lines_at_intersections(gdf_object)\n", - "\n", - " # Explode the linestrings into smaller segments\n", - " distance_interval = 200 # The distance interval you want to segment the lines at\n", - " gdf_object = explode_linestrings(gdf_object, distance_interval)\n", - "\n", - " # Make sure that hydroobjects are connected\n", - " gdf_object = connect_linestrings_within_distance(gdf_object)\n", - "\n", - " # Explode linestrings\n", - " gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True)\n", - " gdf_object = gdf_object[~gdf_object.is_empty].copy()\n", - " gdf_object = gdf_object[gdf_object.length > 1e-7].copy()\n", - "\n", - " ### Create NetworkX nodes ###\n", - " print(\"Create NetworkX\")\n", - " # Use start and end points from hydroobjects in networkx as nodes\n", - " nodes_gdf = gdf_object.copy()\n", - " nodes_gdf[\"geometry\"] = nodes_gdf.geometry.boundary\n", - " nodes_gdf = nodes_gdf.explode(index_parts=True)\n", - "\n", - " # Use the unique points as nodes in networkx\n", - " nodes_gdf.insert(0, \"node_id\", -1)\n", - " node_id = 1\n", - " for geom, group in nodes_gdf.groupby(\"geometry\"):\n", - " nodes_gdf.loc[group.index, \"node_id\"] = node_id\n", - " node_id += 1\n", - "\n", - " ### Select startpoints & endpoints RHWS network ###\n", - " # Find the closest starting points from the crossings.\n", - " # Keep only points which are (almost) equal to the crossings.\n", - " startpoints, distances = nodes_gdf.sindex.nearest(\n", - " gdf_cross_single.geometry, return_all=False, return_distance=True\n", - " )\n", - " startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values\n", - "\n", - " gdf_cross_single[\"node_id\"] = startpoints\n", - " gdf_cross_single[\"node_id_distance\"] = distances\n", - "\n", - " # find the node_id closest to the RHWS representative point (end point)\n", - " # Exclude the points which are already used as starting points\n", - " df_endpoint = 
nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy()\n", - " endpoint, distance = df_endpoint.sindex.nearest(\n", - " rhws.representative_point, return_all=False, return_distance=True\n", - " )\n", - "\n", - " endpoint = df_endpoint.node_id.iat[endpoint[1, 0]]\n", - " gdf_rhws_single[\"node_id\"] = endpoint\n", - " gdf_rhws_single[\"node_id_distance\"] = distance\n", - "\n", - " ### Create networkx graph ###\n", - " graph = nx.Graph()\n", - "\n", - " # add nodes in boezem\n", - " for node_id, group in nodes_gdf.groupby(\"node_id\"):\n", - " graph.add_node(node_id, geometry=group.geometry.iat[0])\n", - "\n", - " # add edges\n", - " line_lookup = gdf_object.geometry\n", - " for idx0, group in nodes_gdf.groupby(level=0):\n", - " node_from, node_to = group.node_id\n", - " line_geom = gdf_object.geometry.at[idx0]\n", - " graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom)\n", - "\n", - " ### Find distruptions Graph ###\n", - " # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes\n", - " # Get lists of compnents (sub-graph)\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " print(len(smaller_components), end=\"\\r\")\n", - " while True:\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component]\n", - "\n", - " if not smaller_components: # If there are no smaller components left, break the loop\n", - " break\n", - "\n", - " # Update node geometries and largest_gdf for each iteration\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - " largest_gdf = component_to_gdf(largest_component, node_geometries)\n", - " smaller_gdfs = 
[component_to_gdf(comp, node_geometries) for comp in smaller_components]\n", - "\n", - " # Find the closest smaller_gdf to the largest_gdf\n", - " closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs)\n", - "\n", - " # Connect the closest nodes\n", - " connect_components(graph, node_in_largest, node_in_smaller, node_geometries)\n", - "\n", - " # calculate shortest_path networkx\n", - " gdf_cross_single[\"shortest_path\"] = shapely.geometry.GeometryCollection()\n", - " not_connected = []\n", - "\n", - " components = list(nx.connected_components(graph))\n", - " largest_component = max(components, key=len)\n", - " smaller_components = [comp for comp in components if comp != largest_component] # not used anymore\n", - " node_geometries = {node: graph.nodes[node][\"geometry\"] for node in graph.nodes()}\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = shapely.ops.linemerge(\n", - " edges\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " if not_connected:\n", - " print(\"not connected\")\n", - " # Force connection\n", - " # Convert the largest connected component to a GeoDataFrame for spatial operations\n", - " largest_component_gdf = gpd.GeoDataFrame(\n", - " geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs\n", - " )\n", - " largest_component_gdf[\"node_id\"] = list(largest_component)\n", - "\n", - " # Iterate over each not_connected node\n", - " for nc_node in not_connected:\n", - " nc_node_geom = 
node_geometries[nc_node]\n", - "\n", - " # Calculate the distance to all nodes in the largest component\n", - " distances = largest_component_gdf.geometry.distance(nc_node_geom)\n", - "\n", - " # Find the closest node in the largest component\n", - " closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id\n", - "\n", - " # Add edge between not_connected node and closest node in the largest component\n", - " # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements\n", - " graph.add_edge(\n", - " nc_node,\n", - " closest_node_id,\n", - " geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]),\n", - " )\n", - "\n", - " for startpoint in startpoints:\n", - " try:\n", - " shortest_path = nx.shortest_path(\n", - " graph, source=startpoint, target=endpoint, weight=\"length\", method=\"dijkstra\"\n", - " )\n", - " edges = []\n", - " for i in range(0, len(shortest_path) - 1):\n", - " edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])[\"geometry\"])\n", - " gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, \"shortest_path\"] = (\n", - " shapely.ops.linemerge(edges)\n", - " )\n", - "\n", - " except nx.NetworkXNoPath as e:\n", - " print(e)\n", - " not_connected.append(startpoint)\n", - "\n", - " ### Append output ###\n", - " gdf_crossings_out.append(gdf_cross_single)\n", - "\n", - " ### Plot graph ###\n", - " print(\"Plotting Output\")\n", - " fig, ax = plt.subplots(figsize=(8, 8))\n", - " plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry=\"shortest_path\", crs=gdf_cross_single.crs)\n", - " plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs)\n", - " plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs)\n", - " ax.set_title(f\"{waterschap} shortest paths {index}\")\n", - " plt_rhws.plot(ax=ax, color=\"green\")\n", - " gdf_rhws_single.plot(ax=ax, 
color=\"lightblue\")\n", - " plt_rep.plot(ax=ax, color=\"blue\", label=\"representative_point\")\n", - " gdf_object.plot(ax=ax, color=\"gray\", linewidth=0.5, label=\"hydroobjects\")\n", - " gdf_cross_single.plot(ax=ax, color=\"orange\", label=\"crossings\")\n", - " plt_paths.plot(ax=ax, color=\"purple\", label=\"shortest paths\")\n", - " ax.legend()\n", - " plt.savefig(f\"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new\", dpi=300)\n", - "\n", - " # Save results\n", - " print(\"Writing Output\")\n", - " objects = {}\n", - " objects[\"hydroobjects\"] = gpd.GeoDataFrame(gdf_object, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " shortest_path = gdf_cross_single.drop(columns=[\"geometry\"])\n", - " shortest_path = shortest_path.rename(columns={\"shortest_path\": \"geometry\"})\n", - " shortest_path = gpd.GeoDataFrame(shortest_path, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - " objects[\"shortest_path\"] = shortest_path\n", - " objects[\"rhws\"] = gpd.GeoDataFrame(gdf_rhws_single, geometry=\"geometry\", crs=gdf_rhws_single.crs).drop(\n", - " columns=[\"representative_point\"]\n", - " )\n", - " objects[\"crossings\"] = gdf_cross_single.drop(columns=[\"shortest_path\"])\n", - " objects[\"representative_point\"] = gpd.GeoDataFrame(\n", - " gdf_rhws_single, geometry=\"representative_point\", crs=gdf_rhws_single.crs\n", - " ).drop(columns=[\"geometry\"])\n", - " objects[\"nodes\"] = gpd.GeoDataFrame(nodes_gdf, geometry=\"geometry\", crs=gdf_cross_single.crs)\n", - "\n", - " for key, value in objects.items():\n", - " # For each GeoDataFrame, save it to a layer in the GeoPackage\n", - " value.to_file(\n", - " f\"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg\", layer=key, driver=\"GPKG\"\n", - " )\n", - " except Exception as e:\n", - " print(e)\n", - "\n", - "\n", - "# Write final output\n", - "gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out))\n", - "gdf_out[\"shortest_path\"] = 
gdf_out[\"shortest_path\"].apply(lambda geom: dumps(geom) if geom is not None else None)\n", - "gdf_out.to_file(\n", - " f\"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg\", driver=\"GPKG\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py b/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py new file mode 100644 index 0000000..cf4e8a3 --- /dev/null +++ b/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py @@ -0,0 +1,701 @@ +# # AGV +# +# +# ### Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 +# + + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +# ### Load Data + + +waterschap1 = "AmstelGooienVecht" +waterschap2 = "AGV" +waterschap = waterschap1 + +# Define crossings file path +path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" 
+data_path_str = pd.read_json(path2json).loc["init"][waterschap1]["output_path"] +data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + + +# Load crossings file +DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + +# ### Select rhws + + +# Select RHWS peilgebeied & calculate representative point +gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() +gdf_rhws["representative_point"] = gdf_rhws.representative_point() + +# Apply aggregation level based filter +gdf_cross = ( + DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() +) # filter aggregation level + + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if 
isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified 
interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + 
change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +# # Shortest Path + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + +# Loop RHWS polygons +gdf_crossings_out = [] + +for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + # print(row) + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + # print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + print(len(gdf_object)) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 5 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### 
Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not 
properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + 
gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, 
geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap1} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap1}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + 
f"./shortest_path/Geopackages/{waterschap1}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + + except Exception as e: + print(e) + + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap2}/{waterschap2}_shortest_path.gpkg", driver="GPKG" +) + + +gdf_crossings_out = [] +gdf_rhws = gdf_rhws.reset_index(drop=True) + + +# Loop RHWS polygons +gdf_crossings_out = [] + +for index, rhws in gdf_rhws.iterrows(): + try: + print(index) + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) + ].copy() + + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + # Explode the linestrings into smaller segments + distance_interval = 200 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + 
gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. + startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + line_lookup = gdf_object.geometry + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, 
geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + # Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + 
edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( + edges + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = 
plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + except 
Exception as e: + print(e) + + +# Write final output +gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) +gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out.to_file( + f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" +) diff --git a/src/peilbeheerst_model/compute_voronoi.ipynb b/src/peilbeheerst_model/compute_voronoi.ipynb deleted file mode 100644 index 1cc3562..0000000 --- a/src/peilbeheerst_model/compute_voronoi.ipynb +++ /dev/null @@ -1,366 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "import centerline.geometry\n", - "import geopandas as gpd\n", - "import networkx as nx\n", - "import numpy as np\n", - "import pandas as pd\n", - "import shapely\n", - "import shapely.geometry\n", - "import tqdm.auto as tqdm" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "df = gpd.read_file(\"../../../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip.gpkg\", layer=\"peilgebied\")\n", - "df[\"geometry\"] = df.buffer(0)\n", - "df = df[~df.is_empty].copy()\n", - "df[\"geometry\"] = df.geometry.apply(shapely.force_2d)\n", - "df = df[df.peilgebied_cat == 1].copy()\n", - "\n", - "df_crossings = gpd.read_file(\n", - " \"../../../../Data_crossings/Wetterskip/wetterskip_crossings_v05.gpkg\", layer=\"crossings_hydroobject_filtered\"\n", - ")\n", - "df_crossings = df_crossings[df_crossings.agg_links_in_use].copy()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "# Merge polygons with a small buffer. 
Ook nodig om verbindingen te krijgen in sommige smalle watergangen.\n", - "df_merged = df.buffer(1.0).unary_union\n", - "df_merged = gpd.GeoDataFrame(geometry=list(df_merged.geoms), crs=df.crs)\n", - "\n", - "# add merged id to original polygons\n", - "merged_poly_ids = []\n", - "for row in tqdm.tqdm(df.itertuples(), total=len(df)):\n", - " idxs = df_merged.sindex.query(row.geometry, predicate=\"intersects\")\n", - " if len(idxs) == 0:\n", - " raise ValueError(\"no matches\")\n", - " elif len(idxs) > 1:\n", - " overlaps = []\n", - " for idx in idxs:\n", - " overlap = df_merged.iat[idx].intersection(row.geometry).area / row.geometry.area\n", - " overlaps.append(overlap)\n", - " idx = idxs.index(max(overlaps))\n", - " else:\n", - " idx = idxs[0]\n", - " merged_poly_ids.append(idx)\n", - "df[\"merged_poly_id\"] = merged_poly_ids" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "df_center = []\n", - "for idx, row in tqdm.tqdm(df_merged.iterrows(), total=len(df_merged)):\n", - " geom = row.geometry\n", - " interp_dist = 10\n", - " if geom.area < 1000:\n", - " interp_dist = 1\n", - " if geom.area < 100:\n", - " interp_dist = 0.1\n", - " if geom.area < 10:\n", - " interp_dist = 0.01\n", - " if geom.area < 1:\n", - " interp_dist = 0.001\n", - " centerpoly = centerline.geometry.Centerline(geom, interpolation_distance=interp_dist)\n", - " centerpoly = centerpoly.geometry\n", - " centerpoly = centerpoly.simplify(1, preserve_topology=True)\n", - " df_center.append(centerpoly)\n", - "df_center = gpd.GeoDataFrame(geometry=list(df_center), crs=df.crs)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "df_center_single = df_center.explode(index_parts=False)\n", - "df_center_single = df_center_single.set_index(np.arange(len(df_center_single)), append=True)\n", - "df_center_single.index.set_names([\"poly_id\", \"edge_id\"], 
inplace=True)\n", - "\n", - "df_center_single_boundary = df_center_single.copy()\n", - "df_center_single_boundary[\"geometry\"] = df_center_single.boundary\n", - "\n", - "# # Check of alles mooi verbonden is\n", - "# for i, row in tqdm.tqdm(enumerate(df_center_single_boundary.itertuples()), total=len(df_center_single_boundary), desc=\"check connections\"):\n", - "# idx = row.Index\n", - "# geom = row.geometry\n", - "\n", - "# idxs, dists = df_center_single_boundary.sindex.nearest(geom, return_distance=True, return_all=True)\n", - "# idxs = idxs[1, :]\n", - "# dists = dists[idxs != i]\n", - "# idxs = idxs[idxs != i]\n", - "# if dists.min() > 0:\n", - "# print(f\"no closed connection for {idx}, {dist.min()=}\")\n", - "# elif len(idxs) == 0:\n", - "# print(f\"No connection for {idx}: {df_center_single_boundary.iloc[idxs].index}\")\n", - "\n", - "df_center_single_boundary_points = df_center_single_boundary.explode(index_parts=True)\n", - "df_center_single_boundary_points[\"node_id\"] = None\n", - "df_center_single_boundary_points[\"connectivity\"] = None\n", - "\n", - "node_id = 0\n", - "idxs, node_ids, connectivity = [], [], []\n", - "for poly_id, poly_group in tqdm.tqdm(\n", - " df_center_single_boundary_points.groupby(\"poly_id\", sort=False), desc=\"assign node ids\"\n", - "):\n", - " for geom, group in tqdm.tqdm(poly_group.groupby(\"geometry\", sort=False), desc=f\"{poly_id=}\", leave=False):\n", - " idxs.append(group.index)\n", - " node_ids.append(len(group) * [node_id])\n", - " connectivity.append(len(group) * [len(group)])\n", - " node_id += 1\n", - "\n", - "df_center_single_boundary_points.loc[np.hstack(idxs), \"node_id\"] = np.hstack(node_ids)\n", - "df_center_single_boundary_points.loc[np.hstack(idxs), \"connectivity\"] = np.hstack(connectivity)\n", - "idxs, node_ids, connectivity = None, None, None\n", - "\n", - "assert not pd.isna(df_center_single_boundary_points.node_id).any()\n", - "assert not 
pd.isna(df_center_single_boundary_points.connectivity).any()\n", - "\n", - "df_center_single_boundary_points = df_center_single_boundary_points.droplevel(-1).set_index(\"node_id\", append=True)\n", - "df_center_single_boundary_points" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# Alleen edges proberen te mergen waarvan beide uiteindes (nodes) connectivity 2 hebben\n", - "pot_reduce = []\n", - "for edge_id, group in tqdm.tqdm(\n", - " df_center_single_boundary_points.groupby(\"edge_id\", sort=False), desc=\"Find connectivity=2\"\n", - "):\n", - " if (group.connectivity == 2).all():\n", - " pot_reduce.append(edge_id)\n", - "pot_reduce = df_center_single_boundary.loc[pd.IndexSlice[:, pot_reduce], :].copy()\n", - "\n", - "# Identify merge groups\n", - "edges_visited = {}\n", - "merge_group = 0\n", - "pot_reduce[\"merge_group\"] = None\n", - "for poly_id, polygroup in tqdm.tqdm(pot_reduce.groupby(\"poly_id\", sort=False), desc=\"group edges per polygon\"):\n", - " for edge_id, group in tqdm.tqdm(polygroup.groupby(\"edge_id\", sort=False), leave=False, desc=f\"{poly_id=}\"):\n", - " if edge_id in edges_visited:\n", - " continue\n", - "\n", - " ivec = np.where(polygroup.index.isin(group.index))[0]\n", - " prev_len = 0\n", - " while len(ivec) != prev_len:\n", - " prev_len = len(ivec)\n", - " ivec = polygroup.sindex.query(polygroup.geometry.iloc[ivec], predicate=\"intersects\")\n", - " ivec = np.unique(ivec[1, :])\n", - "\n", - " lbls = polygroup.index[ivec]\n", - " assert len(pot_reduce.loc[lbls].index.get_level_values(\"poly_id\").unique()) == 1\n", - " pot_reduce.loc[lbls, \"merge_group\"] = merge_group\n", - "\n", - " for eid in lbls.get_level_values(\"edge_id\"):\n", - " edges_visited[eid] = True\n", - " merge_group += 1\n", - "\n", - "# Merge\n", - "df_center_single_red = df_center_single[~df_center_single.index.isin(pot_reduce.index)].copy()\n", - "add_rows = []\n", - "for group_id, 
group in tqdm.tqdm(pot_reduce.groupby(\"merge_group\", dropna=True, sort=False), desc=\"merge edges\"):\n", - " edges_to_merge = np.unique(group.index.get_level_values(\"edge_id\").to_numpy())\n", - " geoms = df_center_single.geometry.loc[pd.IndexSlice[:, edges_to_merge]].tolist()\n", - " geom = shapely.ops.linemerge(geoms)\n", - " assert geom.geom_type == \"LineString\"\n", - " single_row = df_center_single.loc[pd.IndexSlice[:, edges_to_merge[0]], :].copy()\n", - " single_row.loc[:, \"geometry\"] = geom\n", - " add_rows.append(single_row)\n", - "\n", - "# Overwrite dataframes\n", - "df_center_single = pd.concat([df_center_single_red] + add_rows)\n", - "\n", - "df_center_single_boundary = df_center_single.copy()\n", - "df_center_single_boundary[\"geometry\"] = df_center_single.boundary\n", - "\n", - "df_center_single_boundary_points = df_center_single_boundary.explode(index_parts=True)\n", - "df_center_single_boundary_points[\"node_id\"] = None\n", - "idxs, node_ids = [], []\n", - "for node_id, (geom, group) in enumerate(\n", - " tqdm.tqdm(df_center_single_boundary_points.groupby(\"geometry\", sort=False), desc=\"assign node ids\")\n", - "):\n", - " idxs.append(group.index)\n", - " node_ids.append(len(group) * [node_id])\n", - "df_center_single_boundary_points.loc[np.hstack(idxs), \"node_id\"] = np.hstack(node_ids)\n", - "assert not pd.isna(df_center_single_boundary_points.node_id).any()\n", - "df_center_single_boundary_points = df_center_single_boundary_points.droplevel(-1).set_index(\"node_id\", append=True)\n", - "\n", - "# # Check of alles mooi verbonden is\n", - "# for i, row in tqdm.tqdm(enumerate(df_center_single_boundary.itertuples()), total=len(df_center_single_boundary), desc=\"check connections\"):\n", - "# idx = row.Index\n", - "# geom = row.geometry\n", - "\n", - "# idxs, dists = df_center_single_boundary.sindex.nearest(geom, return_distance=True, return_all=True)\n", - "# idxs = idxs[1, :]\n", - "# dists = dists[idxs != i]\n", - "# idxs = idxs[idxs != 
i]\n", - "# if dists.min() > 0:\n", - "# print(f\"no closed connection for {idx}, {dist.min()=}\")\n", - "# elif len(idxs) == 0:\n", - "# print(f\"No connection for {idx}: {df_center_single_boundary.iloc[idxs].index}\")\n", - "\n", - "df_center_single_boundary_points" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "edge_lengths = dict(zip(df_center_single.index.get_level_values(\"edge_id\"), df_center_single.length))\n", - "shortest_paths = {\"poly_id\": [], \"start_node\": [], \"end_node\": [], \"geometry\": []}\n", - "for poly_id, row in tqdm.tqdm(df_merged.iterrows(), total=len(df_merged)):\n", - " merged_poly = row.geometry\n", - "\n", - " globalids = df.globalid.loc[df.merged_poly_id == poly_id].unique()\n", - " df_crossings_single = df_crossings[\n", - " df_crossings.peilgebied_from.isin(globalids) | df_crossings.peilgebied_to.isin(globalids)\n", - " ].copy()\n", - "\n", - " # End point\n", - " df_graph = df_center_single_boundary_points.loc[pd.IndexSlice[poly_id, :, :], :].copy()\n", - " idx_end, distance_end = df_graph.sindex.nearest(\n", - " merged_poly.representative_point(), return_distance=True, return_all=False\n", - " )\n", - " distance_end = distance_end[0]\n", - " idx_end = idx_end[1, 0]\n", - " idx_end = df_graph.index[idx_end]\n", - " end_node = idx_end[-1]\n", - " df_crossings\n", - " # print(f\"{poly_id=}, closest vertex for endpoint at {distance_end:.2f}m ({idx_end=})\")\n", - "\n", - " # Starting points\n", - " idxs, distances = df_graph.sindex.nearest(df_crossings_single.geometry, return_distance=True, return_all=False)\n", - " idx_cross = df_crossings_single.iloc[idxs[0, :]].index\n", - " df_crossings_single.loc[idx_cross, \"start_node\"] = df_graph.iloc[idxs[1, :]].index.get_level_values(\"node_id\")\n", - " df_crossings.loc[idx_cross, \"start_node\"] = df_graph.iloc[idxs[1, :]].index.get_level_values(\"node_id\")\n", - " start_nodes = 
df_crossings_single[\"start_node\"].dropna().unique().astype(int).tolist()\n", - "\n", - " # Make network for this polygon\n", - " node_ids = df_graph.index.get_level_values(\"node_id\")\n", - " graph = nx.Graph()\n", - "\n", - " # Add nodes and edges\n", - " graph.add_nodes_from(node_ids.unique().tolist())\n", - " for edge_id, group in df_graph.groupby(\"edge_id\", sort=False):\n", - " node1, node2 = group.index.get_level_values(\"node_id\").tolist()\n", - " graph.add_edge(node1, node2, weight=edge_lengths[edge_id])\n", - "\n", - " # Determine shortest path for each start node\n", - " for start_node in tqdm.tqdm(start_nodes, leave=False, desc=f\"{poly_id=}\"):\n", - " try:\n", - " # node_path = nx.dijkstra_path(graph, start_node, end_node)\n", - " node_path = nx.astar_path(graph, start_node, end_node)\n", - " edges = df_graph.loc[pd.IndexSlice[:, :, node_path]].index.get_level_values(\"edge_id\").to_numpy()\n", - " geom = shapely.ops.linemerge(df_center_single.geometry.loc[pd.IndexSlice[poly_id, edges]].tolist())\n", - " shortest_paths[\"poly_id\"].append(poly_id)\n", - " shortest_paths[\"start_node\"].append(start_node)\n", - " shortest_paths[\"end_node\"].append(end_node)\n", - " shortest_paths[\"geometry\"].append(geom)\n", - " except Exception as e:\n", - " print(e)\n", - " pass\n", - "\n", - "df_startcrossings = df_crossings[~pd.isna(df_crossings.start_node)].copy()\n", - "shortest_paths = gpd.GeoDataFrame(shortest_paths, geometry=\"geometry\", crs=df_crossings.crs)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "df_merged.to_file(\"test_voronoi.gpkg\", layer=\"merged_poly\")\n", - "df_center_single.to_file(\"test_voronoi.gpkg\", layer=\"edges\")\n", - "df_center_single_boundary_points.to_file(\"test_voronoi.gpkg\", layer=\"nodes\")\n", - "shortest_paths.to_file(\"test_voronoi.gpkg\", layer=\"shortest_paths\")\n", - "df_startcrossings.to_file(\"test_voronoi.gpkg\", 
layer=\"start_crossings\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# # Check of alles mooi verbonden is\n", - "# for poly_id, polygroup in tqdm.tqdm(df_center_single_boundary.groupby(\"poly_id\", sort=False), desc=\"check connections\"):\n", - "# for i, row in enumerate(polygroup.itertuples()):\n", - "# idx = row.Index\n", - "# geom = row.geometry\n", - "\n", - "# idxs = polygroup.sindex.query(geom, predicate=\"intersects\")\n", - "# idxs = idxs[idxs != i]\n", - "# dists = polygroup.geometry.iloc[idxs].distance(row.geometry)\n", - "# if dists.min() > 0:\n", - "# print(f\"no closed connection for {idx}, {dist.min()=}\")\n", - "# if len(idxs) == 0:\n", - "# print(f\"No connection for {idx}: {polygroup.iloc[idxs].index}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/compute_voronoi.py b/src/peilbeheerst_model/compute_voronoi.py new file mode 100644 index 0000000..862c8f0 --- /dev/null +++ b/src/peilbeheerst_model/compute_voronoi.py @@ -0,0 +1,267 @@ +import centerline.geometry +import geopandas as gpd +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import shapely.geometry +import tqdm.auto as tqdm + +df = gpd.read_file("../../../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip.gpkg", layer="peilgebied") +df["geometry"] = df.buffer(0) +df = df[~df.is_empty].copy() 
+df["geometry"] = df.geometry.apply(shapely.force_2d) +df = df[df.peilgebied_cat == 1].copy() + +df_crossings = gpd.read_file( + "../../../../Data_crossings/Wetterskip/wetterskip_crossings_v05.gpkg", layer="crossings_hydroobject_filtered" +) +df_crossings = df_crossings[df_crossings.agg_links_in_use].copy() + + +# Merge polygons with a small buffer. Ook nodig om verbindingen te krijgen in sommige smalle watergangen. +df_merged = df.buffer(1.0).unary_union +df_merged = gpd.GeoDataFrame(geometry=list(df_merged.geoms), crs=df.crs) + +# add merged id to original polygons +merged_poly_ids = [] +for row in tqdm.tqdm(df.itertuples(), total=len(df)): + idxs = df_merged.sindex.query(row.geometry, predicate="intersects") + if len(idxs) == 0: + raise ValueError("no matches") + elif len(idxs) > 1: + overlaps = [] + for idx in idxs: + overlap = df_merged.iat[idx].intersection(row.geometry).area / row.geometry.area + overlaps.append(overlap) + idx = idxs.index(max(overlaps)) + else: + idx = idxs[0] + merged_poly_ids.append(idx) +df["merged_poly_id"] = merged_poly_ids + + +df_center = [] +for idx, row in tqdm.tqdm(df_merged.iterrows(), total=len(df_merged)): + geom = row.geometry + interp_dist = 10 + if geom.area < 1000: + interp_dist = 1 + if geom.area < 100: + interp_dist = 0.1 + if geom.area < 10: + interp_dist = 0.01 + if geom.area < 1: + interp_dist = 0.001 + centerpoly = centerline.geometry.Centerline(geom, interpolation_distance=interp_dist) + centerpoly = centerpoly.geometry + centerpoly = centerpoly.simplify(1, preserve_topology=True) + df_center.append(centerpoly) +df_center = gpd.GeoDataFrame(geometry=list(df_center), crs=df.crs) + + +df_center_single = df_center.explode(index_parts=False) +df_center_single = df_center_single.set_index(np.arange(len(df_center_single)), append=True) +df_center_single.index.set_names(["poly_id", "edge_id"], inplace=True) + +df_center_single_boundary = df_center_single.copy() +df_center_single_boundary["geometry"] = 
df_center_single.boundary + +# # Check of alles mooi verbonden is +# for i, row in tqdm.tqdm(enumerate(df_center_single_boundary.itertuples()), total=len(df_center_single_boundary), desc="check connections"): +# idx = row.Index +# geom = row.geometry + +# idxs, dists = df_center_single_boundary.sindex.nearest(geom, return_distance=True, return_all=True) +# idxs = idxs[1, :] +# dists = dists[idxs != i] +# idxs = idxs[idxs != i] +# if dists.min() > 0: +# print(f"no closed connection for {idx}, {dist.min()=}") +# elif len(idxs) == 0: +# print(f"No connection for {idx}: {df_center_single_boundary.iloc[idxs].index}") + +df_center_single_boundary_points = df_center_single_boundary.explode(index_parts=True) +df_center_single_boundary_points["node_id"] = None +df_center_single_boundary_points["connectivity"] = None + +node_id = 0 +idxs, node_ids, connectivity = [], [], [] +for poly_id, poly_group in tqdm.tqdm( + df_center_single_boundary_points.groupby("poly_id", sort=False), desc="assign node ids" +): + for geom, group in tqdm.tqdm(poly_group.groupby("geometry", sort=False), desc=f"{poly_id=}", leave=False): + idxs.append(group.index) + node_ids.append(len(group) * [node_id]) + connectivity.append(len(group) * [len(group)]) + node_id += 1 + +df_center_single_boundary_points.loc[np.hstack(idxs), "node_id"] = np.hstack(node_ids) +df_center_single_boundary_points.loc[np.hstack(idxs), "connectivity"] = np.hstack(connectivity) +idxs, node_ids, connectivity = None, None, None + +assert not pd.isna(df_center_single_boundary_points.node_id).any() +assert not pd.isna(df_center_single_boundary_points.connectivity).any() + +df_center_single_boundary_points = df_center_single_boundary_points.droplevel(-1).set_index("node_id", append=True) +df_center_single_boundary_points + + +# Alleen edges proberen te mergen waarvan beide uiteindes (nodes) connectivity 2 hebben +pot_reduce = [] +for edge_id, group in tqdm.tqdm( + df_center_single_boundary_points.groupby("edge_id", sort=False), 
desc="Find connectivity=2" +): + if (group.connectivity == 2).all(): + pot_reduce.append(edge_id) +pot_reduce = df_center_single_boundary.loc[pd.IndexSlice[:, pot_reduce], :].copy() + +# Identify merge groups +edges_visited = {} +merge_group = 0 +pot_reduce["merge_group"] = None +for poly_id, polygroup in tqdm.tqdm(pot_reduce.groupby("poly_id", sort=False), desc="group edges per polygon"): + for edge_id, group in tqdm.tqdm(polygroup.groupby("edge_id", sort=False), leave=False, desc=f"{poly_id=}"): + if edge_id in edges_visited: + continue + + ivec = np.where(polygroup.index.isin(group.index))[0] + prev_len = 0 + while len(ivec) != prev_len: + prev_len = len(ivec) + ivec = polygroup.sindex.query(polygroup.geometry.iloc[ivec], predicate="intersects") + ivec = np.unique(ivec[1, :]) + + lbls = polygroup.index[ivec] + assert len(pot_reduce.loc[lbls].index.get_level_values("poly_id").unique()) == 1 + pot_reduce.loc[lbls, "merge_group"] = merge_group + + for eid in lbls.get_level_values("edge_id"): + edges_visited[eid] = True + merge_group += 1 + +# Merge +df_center_single_red = df_center_single[~df_center_single.index.isin(pot_reduce.index)].copy() +add_rows = [] +for group_id, group in tqdm.tqdm(pot_reduce.groupby("merge_group", dropna=True, sort=False), desc="merge edges"): + edges_to_merge = np.unique(group.index.get_level_values("edge_id").to_numpy()) + geoms = df_center_single.geometry.loc[pd.IndexSlice[:, edges_to_merge]].tolist() + geom = shapely.ops.linemerge(geoms) + assert geom.geom_type == "LineString" + single_row = df_center_single.loc[pd.IndexSlice[:, edges_to_merge[0]], :].copy() + single_row.loc[:, "geometry"] = geom + add_rows.append(single_row) + +# Overwrite dataframes +df_center_single = pd.concat([df_center_single_red] + add_rows) + +df_center_single_boundary = df_center_single.copy() +df_center_single_boundary["geometry"] = df_center_single.boundary + +df_center_single_boundary_points = df_center_single_boundary.explode(index_parts=True) 
+df_center_single_boundary_points["node_id"] = None +idxs, node_ids = [], [] +for node_id, (geom, group) in enumerate( + tqdm.tqdm(df_center_single_boundary_points.groupby("geometry", sort=False), desc="assign node ids") +): + idxs.append(group.index) + node_ids.append(len(group) * [node_id]) +df_center_single_boundary_points.loc[np.hstack(idxs), "node_id"] = np.hstack(node_ids) +assert not pd.isna(df_center_single_boundary_points.node_id).any() +df_center_single_boundary_points = df_center_single_boundary_points.droplevel(-1).set_index("node_id", append=True) + +# # Check of alles mooi verbonden is +# for i, row in tqdm.tqdm(enumerate(df_center_single_boundary.itertuples()), total=len(df_center_single_boundary), desc="check connections"): +# idx = row.Index +# geom = row.geometry + +# idxs, dists = df_center_single_boundary.sindex.nearest(geom, return_distance=True, return_all=True) +# idxs = idxs[1, :] +# dists = dists[idxs != i] +# idxs = idxs[idxs != i] +# if dists.min() > 0: +# print(f"no closed connection for {idx}, {dist.min()=}") +# elif len(idxs) == 0: +# print(f"No connection for {idx}: {df_center_single_boundary.iloc[idxs].index}") + +df_center_single_boundary_points + + +edge_lengths = dict(zip(df_center_single.index.get_level_values("edge_id"), df_center_single.length)) +shortest_paths = {"poly_id": [], "start_node": [], "end_node": [], "geometry": []} +for poly_id, row in tqdm.tqdm(df_merged.iterrows(), total=len(df_merged)): + merged_poly = row.geometry + + globalids = df.globalid.loc[df.merged_poly_id == poly_id].unique() + df_crossings_single = df_crossings[ + df_crossings.peilgebied_from.isin(globalids) | df_crossings.peilgebied_to.isin(globalids) + ].copy() + + # End point + df_graph = df_center_single_boundary_points.loc[pd.IndexSlice[poly_id, :, :], :].copy() + idx_end, distance_end = df_graph.sindex.nearest( + merged_poly.representative_point(), return_distance=True, return_all=False + ) + distance_end = distance_end[0] + idx_end = idx_end[1, 
0] + idx_end = df_graph.index[idx_end] + end_node = idx_end[-1] + df_crossings + # print(f"{poly_id=}, closest vertex for endpoint at {distance_end:.2f}m ({idx_end=})") + + # Starting points + idxs, distances = df_graph.sindex.nearest(df_crossings_single.geometry, return_distance=True, return_all=False) + idx_cross = df_crossings_single.iloc[idxs[0, :]].index + df_crossings_single.loc[idx_cross, "start_node"] = df_graph.iloc[idxs[1, :]].index.get_level_values("node_id") + df_crossings.loc[idx_cross, "start_node"] = df_graph.iloc[idxs[1, :]].index.get_level_values("node_id") + start_nodes = df_crossings_single["start_node"].dropna().unique().astype(int).tolist() + + # Make network for this polygon + node_ids = df_graph.index.get_level_values("node_id") + graph = nx.Graph() + + # Add nodes and edges + graph.add_nodes_from(node_ids.unique().tolist()) + for edge_id, group in df_graph.groupby("edge_id", sort=False): + node1, node2 = group.index.get_level_values("node_id").tolist() + graph.add_edge(node1, node2, weight=edge_lengths[edge_id]) + + # Determine shortest path for each start node + for start_node in tqdm.tqdm(start_nodes, leave=False, desc=f"{poly_id=}"): + try: + # node_path = nx.dijkstra_path(graph, start_node, end_node) + node_path = nx.astar_path(graph, start_node, end_node) + edges = df_graph.loc[pd.IndexSlice[:, :, node_path]].index.get_level_values("edge_id").to_numpy() + geom = shapely.ops.linemerge(df_center_single.geometry.loc[pd.IndexSlice[poly_id, edges]].tolist()) + shortest_paths["poly_id"].append(poly_id) + shortest_paths["start_node"].append(start_node) + shortest_paths["end_node"].append(end_node) + shortest_paths["geometry"].append(geom) + except Exception as e: + print(e) + pass + +df_startcrossings = df_crossings[~pd.isna(df_crossings.start_node)].copy() +shortest_paths = gpd.GeoDataFrame(shortest_paths, geometry="geometry", crs=df_crossings.crs) + + +df_merged.to_file("test_voronoi.gpkg", layer="merged_poly") 
+df_center_single.to_file("test_voronoi.gpkg", layer="edges") +df_center_single_boundary_points.to_file("test_voronoi.gpkg", layer="nodes") +shortest_paths.to_file("test_voronoi.gpkg", layer="shortest_paths") +df_startcrossings.to_file("test_voronoi.gpkg", layer="start_crossings") + + +# # Check of alles mooi verbonden is +# for poly_id, polygroup in tqdm.tqdm(df_center_single_boundary.groupby("poly_id", sort=False), desc="check connections"): +# for i, row in enumerate(polygroup.itertuples()): +# idx = row.Index +# geom = row.geometry + +# idxs = polygroup.sindex.query(geom, predicate="intersects") +# idxs = idxs[idxs != i] +# dists = polygroup.geometry.iloc[idxs].distance(row.geometry) +# if dists.min() > 0: +# print(f"no closed connection for {idx}, {dist.min()=}") +# if len(idxs) == 0: +# print(f"No connection for {idx}: {polygroup.iloc[idxs].index}") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.ipynb deleted file mode 100644 index 3fcfcff..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.ipynb +++ /dev/null @@ -1,482 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# WSRL" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": 
"markdown", - "id": "3", - "metadata": {}, - "source": [ - "## WSRL" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"WSRL\"\n", - "\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_wsrl.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "### Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "WSRL = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "WSRL[\"peilgebied\"] = WSRL[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "# check primary key\n", - 
"WSRL[\"peilgebied\"][\"globalid\"].is_unique" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"Waterschap Rivierenland\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(WSRL[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# # # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "# non_overlapping_peilgebied = gpd.overlay(WSRL['peilgebied'], overlaps, how='difference', keep_geom_type=True)\n", - "# overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how='intersection', keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 20000\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "for index, row in WSRL[\"peilgebied\"].iterrows():\n", - " if row.CODE == \"LNG014-P\":\n", - " 
print(\"yes\")\n", - " peilgebieden_cat.append(1)\n", - "\n", - " else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - "# Add new column and drop old HWS_BZM column\n", - "WSRL[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat\n", - "# WSRL['peilgebied'] = WSRL['peilgebied'].drop(columns=['HWS_BZM'])\n", - "WSRL[\"peilgebied\"] = WSRL[\"peilgebied\"].rename(columns={\"CODE\": \"code\"})" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# list(WSRL['peilgebied'].code.unique())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# add boezems\n", - "codes_to_update = [\n", - " \"NDB004-P\",\n", - " \"LNG013-P\",\n", - " \"LNG012-P\",\n", - " \"LNG011-P\",\n", - " \"LNG010-P\",\n", - " \"LNG009-P\",\n", - " \"LNG008-P\",\n", - " \"LNG007-P\",\n", - " \"LNG006-P\",\n", - " \"LNG005-P\",\n", - " \"LNG304-P\",\n", - " \"LNG002-P\",\n", - " \"LNG001-P\",\n", - " \"LNG014-P_extra\",\n", - " \"NDW100-P\",\n", - " \"OVW200-P\",\n", - "]\n", - "WSRL[\"peilgebied\"].loc[WSRL[\"peilgebied\"][\"code\"].isin(codes_to_update), \"peilgebied_cat\"] = 1" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "## Adjust globalid, code, nen3610id ['streefpeil'], ['peilgebied']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "codes = []\n", - "globalids = []\n", - "nen3610ids = []\n", - "\n", - "for index, row in WSRL[\"peilgebied\"].iterrows():\n", - " codes.append(f\"dummy_code_peilgebied_{row.globalid}\")\n", - " globalids.append(f\"dummy_globalid_peilgebied_{row.globalid}\")\n", - " nen3610ids.append(f\"dummy_nen3610id_peilgebied_{row.globalid}\")\n", - "\n", - "WSRL[\"peilgebied\"][\"code\"] = codes\n", - "WSRL[\"peilgebied\"][\"globalid\"] = globalids\n", - "WSRL[\"peilgebied\"][\"nen3610id\"] = 
nen3610ids\n", - "\n", - "WSRL[\"streefpeil\"][\"globalid\"] = globalids" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "WSRL[\"peilgebied\"][\"globalid\"].is_unique" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "## Add nhws to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "WSRL[\"peilgebied\"] = pd.concat([gdf_hws, WSRL[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "WSRL[\"streefpeil\"] = pd.concat([streefpeil_hws, WSRL[\"streefpeil\"]])\n", - "WSRL[\"streefpeil\"] = gpd.GeoDataFrame(WSRL[\"streefpeil\"])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "WSRL[\"peilgebied\"][\"globalid\"].is_unique" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer" - ] - }, - { - "cell_type": 
"code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0])\n", - "# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist()))\n", - "# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(WSRL['peilgebied'].geometry.tolist()))\n", - "\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon = buffer_polygon.set_geometry(0)\n", - "# buffer_polygon = buffer_polygon.dissolve()\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon.set_geometry('geometry')\n", - "# buffer_polygon = buffer_polygon.set_crs('EPSG:28992')" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": {}, - "source": [ - "## Add buffer to ['peilgebied']" - ] - }, - { - "cell_type": "markdown", - "id": "24", - "metadata": {}, - "source": [ - "## Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1']\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "# WSRL['streefpeil'] = pd.concat([streefpeil_buffer, WSRL['streefpeil']])\n", - "# WSRL['streefpeil'] = gpd.GeoDataFrame(WSRL['streefpeil'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " WSRL[\"peilgebied\"] = WSRL[\"peilgebied\"].loc[WSRL[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - 
"WSRL[\"peilgebied\"][\"peilgebied_cat\"].unique()" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": {}, - "source": [ - "## Store output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [ - "for key in WSRL.keys():\n", - " print(key)\n", - " WSRL[str(key)].to_file(f\"{output_folder}/{waterschap}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py new file mode 100644 index 0000000..5474c02 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py @@ -0,0 +1,230 @@ +# # WSRL + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS + + +import geopandas as gpd +import 
numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## WSRL + + +# define relative paths +waterschap = "WSRL" + +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_wsrl.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}" + + +# ### Load Files + + +# Load HHNK files +WSRL = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +WSRL["peilgebied"] = WSRL["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) + + +# check primary key +WSRL["peilgebied"]["globalid"].is_unique + + +# ## Select waterschap boundaries and clip hws layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[["Waterschap Rivierenland"]] + +# Use waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(WSRL["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# # # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +# non_overlapping_peilgebied = gpd.overlay(WSRL['peilgebied'], overlaps, how='difference', keep_geom_type=True) +# overlaps = 
gpd.overlay(non_overlapping_peilgebied, gdf_hws, how='intersection', keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 20000 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + + +# Add occurence to geodataframe +peilgebieden_cat = [] + +for index, row in WSRL["peilgebied"].iterrows(): + if row.CODE == "LNG014-P": + print("yes") + peilgebieden_cat.append(1) + + else: + peilgebieden_cat.append(0) + +# Add new column and drop old HWS_BZM column +WSRL["peilgebied"]["peilgebied_cat"] = peilgebieden_cat +# WSRL['peilgebied'] = WSRL['peilgebied'].drop(columns=['HWS_BZM']) +WSRL["peilgebied"] = WSRL["peilgebied"].rename(columns={"CODE": "code"}) + + +# list(WSRL['peilgebied'].code.unique()) + + +# add boezems +codes_to_update = [ + "NDB004-P", + "LNG013-P", + "LNG012-P", + "LNG011-P", + "LNG010-P", + "LNG009-P", + "LNG008-P", + "LNG007-P", + "LNG006-P", + "LNG005-P", + "LNG304-P", + "LNG002-P", + "LNG001-P", + "LNG014-P_extra", + "NDW100-P", + "OVW200-P", +] +WSRL["peilgebied"].loc[WSRL["peilgebied"]["code"].isin(codes_to_update), "peilgebied_cat"] = 1 + + +# ## Adjust globalid, code, nen3610id ['streefpeil'], ['peilgebied'] + + +codes = [] +globalids = [] +nen3610ids = [] + +for index, row in WSRL["peilgebied"].iterrows(): + codes.append(f"dummy_code_peilgebied_{row.globalid}") + globalids.append(f"dummy_globalid_peilgebied_{row.globalid}") + nen3610ids.append(f"dummy_nen3610id_peilgebied_{row.globalid}") + +WSRL["peilgebied"]["code"] = codes +WSRL["peilgebied"]["globalid"] = globalids +WSRL["peilgebied"]["nen3610id"] = nen3610ids + +WSRL["streefpeil"]["globalid"] = globalids + + 
+WSRL["peilgebied"]["globalid"].is_unique + + +# ## Add nhws to ['peilgebied','streefpeil'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +WSRL["peilgebied"] = pd.concat([gdf_hws, WSRL["peilgebied"]]) + + +# Create boezem streefpeil layer +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +WSRL["streefpeil"] = pd.concat([streefpeil_hws, WSRL["streefpeil"]]) +WSRL["streefpeil"] = gpd.GeoDataFrame(WSRL["streefpeil"]) + + +WSRL["peilgebied"]["globalid"].is_unique + + +# ### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer + + +# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0]) +# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist())) +# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(WSRL['peilgebied'].geometry.tolist())) + +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon = buffer_polygon.set_geometry(0) +# buffer_polygon = buffer_polygon.dissolve() +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon.set_geometry('geometry') +# buffer_polygon = buffer_polygon.set_crs('EPSG:28992') + + +# ## Add buffer to ['peilgebied'] + +# ## Add buffer to ['peilgebied','streefpeil'] + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = 
['dummy_globalid_nhws_buffer_1'] +# streefpeil_buffer['geometry'] = [None] + +# WSRL['streefpeil'] = pd.concat([streefpeil_buffer, WSRL['streefpeil']]) +# WSRL['streefpeil'] = gpd.GeoDataFrame(WSRL['streefpeil']) + + +if remove_cat_2: + WSRL["peilgebied"] = WSRL["peilgebied"].loc[WSRL["peilgebied"].peilgebied_cat != 2] + + +WSRL["peilgebied"]["peilgebied_cat"].unique() + + +# ## Store output + + +for key in WSRL.keys(): + print(key) + WSRL[str(key)].to_file(f"{output_folder}/{waterschap}.gpkg", layer=str(key), driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.ipynb deleted file mode 100644 index caad67e..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.ipynb +++ /dev/null @@ -1,403 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Delfland" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> boezem\n", - "- peilgebied_cat = 2 -> HWSNotes:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "from general_functions import *\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Set Paths" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Delfland\"\n", - "\n", - "data_path = 
f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_delfland.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}/\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "## Load files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "delfland = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "\n", - "delfland[\"peilgebied\"] = delfland[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"HHS van Delfland\"]]\n", - "\n", - "# Use waterschap boudnaries to 
clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "## Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(delfland[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(delfland[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 200\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "## Create peilgebied_cat column" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "ids = []\n", - "\n", - "for index, row in delfland[\"peilgebied\"].iterrows():\n", - " if row.code.startswith(\"BZM\") or row.HWS_BZM:\n", - " print(\"yes\")\n", - " peilgebieden_cat.append(1)\n", - "\n", 
- " # Check if the row's globalid is in overlap_ids\n", - " elif row.globalid in overlap_ids:\n", - " peilgebieden_cat.append(2)\n", - "\n", - " # If none of the above conditions are met, append 0\n", - " else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - "# Add new column and drop old HWS_BZM column\n", - "delfland[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat\n", - "delfland[\"peilgebied\"] = delfland[\"peilgebied\"].drop(columns=[\"HWS_BZM\"])" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "## Add HWS to ['peilgebied', 'streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid__hws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_hws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_hws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "delfland[\"peilgebied\"] = pd.concat([gdf_hws, delfland[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_hws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "delfland[\"streefpeil\"] = pd.concat([streefpeil_hws, delfland[\"streefpeil\"]])\n", - "delfland[\"streefpeil\"] = gpd.GeoDataFrame(delfland[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "### Create buffer layer that ensures spatial match between peilgebied and hws 
layers based on the buffer layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create buffer polygon\n", - "# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True)\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True)\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, delfland['peilgebied'], how='difference', keep_geom_type=True)" - ] - }, - { - "cell_type": "markdown", - "id": "18", - "metadata": {}, - "source": [ - "## Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 'dummy_nen3610id_buffer_1' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# delfland['peilgebied'] = pd.concat([buffer_polygon, delfland['peilgebied']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = ['dummy_globalid_buffer_1']\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# delfland['streefpeil'] = pd.concat([streefpeil_buffer, delfland['streefpeil']])\n", - "# delfland['streefpeil'] = gpd.GeoDataFrame(delfland['streefpeil'])\n", - "\n", - "# # Fix\n", - "# delfland['streefpeil']['waterhoogte'] = 
delfland['streefpeil']['waterhoogte'].replace('N/A', np.nan)\n", - "# delfland['streefpeil']['waterhoogte'] = pd.to_numeric(delfland['streefpeil']['waterhoogte'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "delfland[\"peilgebied\"].peilgebied_cat.unique()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "delfland[\"peilgebied\"] = delfland[\"peilgebied\"][[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " delfland[\"peilgebied\"] = delfland[\"peilgebied\"].loc[delfland[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "24", - "metadata": {}, - "source": [ - "## Write output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "for key in delfland.keys():\n", - " print(key)\n", - " delfland[str(key)].to_file(f\"{output_folder}/{waterschap}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py 
b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py new file mode 100644 index 0000000..fe82a3f --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py @@ -0,0 +1,204 @@ +# # Delfland + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> boezem +# - peilgebied_cat = 2 -> HWSNotes: +# + + +import geopandas as gpd +import numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## Set Paths + + +# define relative paths +waterschap = "Delfland" + +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_delfland.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}/" + + +# ## Load files + + +# Load HHNK files +delfland = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) + +delfland["peilgebied"] = delfland["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) + + +# ## Select waterschap boundaries and clip hws layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[["HHS van Delfland"]] + +# Use 
waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +# ## Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. Filter + + +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(delfland["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(delfland["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 200 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + + +# ## Create peilgebied_cat column + + +# Add occurence to geodataframe +peilgebieden_cat = [] +ids = [] + +for index, row in delfland["peilgebied"].iterrows(): + if row.code.startswith("BZM") or row.HWS_BZM: + print("yes") + peilgebieden_cat.append(1) + + # Check if the row's globalid is in overlap_ids + elif row.globalid in overlap_ids: + peilgebieden_cat.append(2) + + # If none of the above conditions are met, append 0 + else: + peilgebieden_cat.append(0) + +# Add new column and drop old HWS_BZM column +delfland["peilgebied"]["peilgebied_cat"] = peilgebieden_cat +delfland["peilgebied"] = delfland["peilgebied"].drop(columns=["HWS_BZM"]) + + +# ## Add HWS to ['peilgebied', 'streefpeil'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid__hws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_hws_" 
+ gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_hws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +delfland["peilgebied"] = pd.concat([gdf_hws, delfland["peilgebied"]]) + + +# Create boezem streefpeil layer +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_hws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +delfland["streefpeil"] = pd.concat([streefpeil_hws, delfland["streefpeil"]]) +delfland["streefpeil"] = gpd.GeoDataFrame(delfland["streefpeil"]) + + +# ### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer + + +# # Create buffer polygon +# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True) +# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True) +# buffer_polygon = gpd.overlay(buffer_polygon, delfland['peilgebied'], how='difference', keep_geom_type=True) + + +# ## Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon['globalid'] = 'dummy_globalid_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 'dummy_code_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_buffer_1' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 + +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# delfland['peilgebied'] = pd.concat([buffer_polygon, delfland['peilgebied']]) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = ['dummy_globalid_buffer_1'] +# streefpeil_buffer['geometry'] = [None] + + +# delfland['streefpeil'] = 
pd.concat([streefpeil_buffer, delfland['streefpeil']]) +# delfland['streefpeil'] = gpd.GeoDataFrame(delfland['streefpeil']) + +# # Fix +# delfland['streefpeil']['waterhoogte'] = delfland['streefpeil']['waterhoogte'].replace('N/A', np.nan) +# delfland['streefpeil']['waterhoogte'] = pd.to_numeric(delfland['streefpeil']['waterhoogte']) + + +delfland["peilgebied"].peilgebied_cat.unique() + + +delfland["peilgebied"] = delfland["peilgebied"][["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + + +if remove_cat_2: + delfland["peilgebied"] = delfland["peilgebied"].loc[delfland["peilgebied"].peilgebied_cat != 2] + + +# ## Write output + + +for key in delfland.keys(): + print(key) + delfland[str(key)].to_file(f"{output_folder}/{waterschap}.gpkg", layer=str(key), driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.ipynb deleted file mode 100644 index 7801eab..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.ipynb +++ /dev/null @@ -1,545 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Rijnland" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "from itertools import combinations\n", - "\n", - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": 
"3", - "metadata": {}, - "source": [ - "## Rijnland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# HD['peilgebied'].globalid.is_unique#define relative paths\n", - "waterschap = \"Rijnland\"\n", - "\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_rijnland.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "### Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "Rijnland = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "Rijnland[\"peilgebied\"] = Rijnland[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)\n", - "\n", - "# temp\n", - "Rijnland[\"peilgebied\"] = Rijnland[\"peilgebied\"].drop(index=2, axis=1)" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "Rijnland[\"peilgebied\"].globalid.is_unique" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"HH van Rijnland\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "## Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(Rijnland[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(Rijnland[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 200\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of 
overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "## Create peilgebied_cat column" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "for index, row in Rijnland[\"peilgebied\"].iterrows():\n", - " if \"RIJNLANDSBOEZEM\" in row.code:\n", - " print(\"true\")\n", - " peilgebieden_cat.append(1)\n", - " else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - "# Add new column and drop old HWS_BZM column\n", - "Rijnland[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "## Add nhws to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "Rijnland[\"peilgebied\"] = pd.concat([gdf_hws, Rijnland[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "Rijnland[\"streefpeil\"] = pd.concat([streefpeil_hws, 
Rijnland[\"streefpeil\"]])\n", - "Rijnland[\"streefpeil\"] = gpd.GeoDataFrame(Rijnland[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "### Create buffer polygon between NHWS and peilgebied/RHWS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0])\n", - "\n", - "# polygons = []\n", - "# for geom in gdf_hws.geometry:\n", - "# if isinstance(geom, MultiPolygon):\n", - "# for poly in geom:\n", - "# polygons.append(poly)\n", - "# elif isinstance(geom, Polygon):\n", - "# polygons.append(geom)\n", - "\n", - "# buffer_polygon = buffer_polygon.difference(MultiPolygon(polygons))\n", - "# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(Rijnland['peilgebied'].geometry.tolist()))\n", - "# buffer_polygon_gdf = gpd.GeoDataFrame([{'geometry': geom} for geom in buffer_polygon], geometry='geometry', crs='EPSG:28992')\n", - "\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon = buffer_polygon.set_geometry(0)\n", - "# buffer_polygon = buffer_polygon.dissolve()\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon.set_geometry('geometry')\n", - "# buffer_polygon = buffer_polygon.set_crs('EPSG:28992')\n" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "### Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# 
buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# Rijnland['peilgebied'] = pd.concat([buffer_polygon, Rijnland['peilgebied']])\n", - "# Rijnland['peilgebied'] = gpd.GeoDataFrame(Rijnland['peilgebied'])\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# Rijnland['streefpeil'] = pd.concat([streefpeil_buffer, Rijnland['streefpeil']])\n", - "# Rijnland['streefpeil'] = gpd.GeoDataFrame(Rijnland['streefpeil'])\n" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": {}, - "source": [ - "## Rijnland data contains many duplicate peilgebieden\n", - "### Calculate polygons that overlap with more than 90 % of their area" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "gdf = Rijnland[\"peilgebied\"][3:]\n", - "\n", - "# Initialize a list to store index pairs with more than 90% overlap\n", - "overlapping_pairs = []\n", - "\n", - "# Iterate through each unique pair of geometries\n", - "for idx1, idx2 in combinations(gdf.index, 2):\n", - " print(f\"Processing {idx1} out of {len(gdf)}...\", end=\"\\r\")\n", - " geom1 = gdf.at[idx1, \"geometry\"]\n", - " geom2 = gdf.at[idx2, \"geometry\"]\n", - "\n", - 
" # Calculate intersection\n", - " intersection = geom1.intersection(geom2)\n", - " intersection_area = intersection.area\n", - "\n", - " # Calculate original areas\n", - " area1 = geom1.area\n", - " area2 = geom2.area\n", - "\n", - " # Calculate intersection percentage for each geometry\n", - " intersection_percentage1 = (intersection_area / area1) * 100\n", - " intersection_percentage2 = (intersection_area / area2) * 100\n", - "\n", - " # Check if both geometries overlap more than 90%\n", - " if intersection_percentage1 > 90 and intersection_percentage2 > 90:\n", - " overlapping_pairs.append((idx1, idx2))\n", - "\n", - "idx1s = []\n", - "idx2s = []\n", - "\n", - "glob_1s = []\n", - "glob_2s = []\n", - "\n", - "\n", - "for idx1, idx2 in overlapping_pairs:\n", - " idx1s.append(idx1)\n", - " idx2s.append(idx2)\n", - "\n", - " glob_1s.append(gdf.iloc[idx1].globalid)\n", - " glob_2s.append(gdf.iloc[idx2].globalid)\n", - "\n", - "\n", - "df = pd.DataFrame()\n", - "df[\"idx1\"] = idx1s\n", - "df[\"idx2\"] = idx2s\n", - "df[\"globalid_1\"] = glob_1s\n", - "df[\"globalid_2\"] = glob_2s\n", - "\n", - "df.to_csv(\"../overlapping_Rijnland.csv\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "print(df)" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": {}, - "source": [ - "### Create list of duplicates for removal" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "numbers_to_remove = []\n", - "\n", - "# Go loop unique index values\n", - "for number in df[\"idx1\"].unique():\n", - " if number in numbers_to_remove:\n", - " continue\n", - "\n", - " # Find all combinations\n", - " associated_idx2 = df[df[\"idx1\"] == number][\"idx2\"].tolist()\n", - " # Append combinations\n", - " numbers_to_remove.extend(associated_idx2)\n", - "\n", - "# Remove duplicates using set operation\n", - "numbers_to_remove = 
list(set(numbers_to_remove))" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": {}, - "source": [ - "### Remove duplicates" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [ - "Rijnland[\"peilgebied\"] = Rijnland[\"peilgebied\"][~Rijnland[\"peilgebied\"].index.isin(numbers_to_remove)]\n", - "Rijnland[\"streefpeil\"] = Rijnland[\"streefpeil\"][~Rijnland[\"streefpeil\"].index.isin(numbers_to_remove)]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " Rijnland[\"peilgebied\"] = Rijnland[\"peilgebied\"].loc[Rijnland[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "31", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": {}, - "outputs": [], - "source": [ - "for key in Rijnland.keys():\n", - " print(key)\n", - " Rijnland[str(key)].to_file(f\"{output_folder}/{waterschap}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py 
b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py new file mode 100644 index 0000000..789cd48 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py @@ -0,0 +1,294 @@ +# # Rijnland + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS + + +from itertools import combinations + +import geopandas as gpd +import numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## Rijnland + + +# HD['peilgebied'].globalid.is_unique#define relative paths +waterschap = "Rijnland" + +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_rijnland.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}" + + +# ### Load Files + + +# Load HHNK files +Rijnland = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +Rijnland["peilgebied"] = Rijnland["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) + +# temp +Rijnland["peilgebied"] = Rijnland["peilgebied"].drop(index=2, axis=1) + + 
+Rijnland["peilgebied"].globalid.is_unique + + +# ## Select waterschap boundaries and clip hws layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[["HH van Rijnland"]] + +# Use waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +# ## Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. Filter + + +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(Rijnland["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(Rijnland["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 200 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + + +# ## Create peilgebied_cat column + + +# Add occurence to geodataframe +peilgebieden_cat = [] + +for index, row in Rijnland["peilgebied"].iterrows(): + if "RIJNLANDSBOEZEM" in row.code: + print("true") + peilgebieden_cat.append(1) + else: + peilgebieden_cat.append(0) + +# Add new column and drop old HWS_BZM column +Rijnland["peilgebied"]["peilgebied_cat"] = peilgebieden_cat + + +# ## Add nhws to ['peilgebied','streefpeil'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = 
"dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +Rijnland["peilgebied"] = pd.concat([gdf_hws, Rijnland["peilgebied"]]) + + +# Create boezem streefpeil layer +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +Rijnland["streefpeil"] = pd.concat([streefpeil_hws, Rijnland["streefpeil"]]) +Rijnland["streefpeil"] = gpd.GeoDataFrame(Rijnland["streefpeil"]) + + +# ### Create buffer polygon between NHWS and peilgebied/RHWS + + +# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0]) + +# polygons = [] +# for geom in gdf_hws.geometry: +# if isinstance(geom, MultiPolygon): +# for poly in geom: +# polygons.append(poly) +# elif isinstance(geom, Polygon): +# polygons.append(geom) + +# buffer_polygon = buffer_polygon.difference(MultiPolygon(polygons)) +# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(Rijnland['peilgebied'].geometry.tolist())) +# buffer_polygon_gdf = gpd.GeoDataFrame([{'geometry': geom} for geom in buffer_polygon], geometry='geometry', crs='EPSG:28992') + +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon = buffer_polygon.set_geometry(0) +# buffer_polygon = buffer_polygon.dissolve() +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon.set_geometry('geometry') +# buffer_polygon = buffer_polygon.set_crs('EPSG:28992') + + +# ### Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# 
buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# Rijnland['peilgebied'] = pd.concat([buffer_polygon, Rijnland['peilgebied']]) +# Rijnland['peilgebied'] = gpd.GeoDataFrame(Rijnland['peilgebied']) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# streefpeil_buffer['geometry'] = [None] + + +# Rijnland['streefpeil'] = pd.concat([streefpeil_buffer, Rijnland['streefpeil']]) +# Rijnland['streefpeil'] = gpd.GeoDataFrame(Rijnland['streefpeil']) + + +# ## Rijnland data contains many duplicate peilgebieden +# ### Calculate polygons that overlap with more than 90 % of their area + + +gdf = Rijnland["peilgebied"][3:] + +# Initialize a list to store index pairs with more than 90% overlap +overlapping_pairs = [] + +# Iterate through each unique pair of geometries +for idx1, idx2 in combinations(gdf.index, 2): + print(f"Processing {idx1} out of {len(gdf)}...", end="\r") + geom1 = gdf.at[idx1, "geometry"] + geom2 = gdf.at[idx2, "geometry"] + + # Calculate intersection + intersection = geom1.intersection(geom2) + intersection_area = intersection.area + + # Calculate original areas + area1 = geom1.area + area2 = geom2.area + + # Calculate intersection percentage for each geometry + intersection_percentage1 = (intersection_area / area1) * 100 + intersection_percentage2 = (intersection_area / area2) * 100 + + # Check if both geometries overlap more than 90% + if intersection_percentage1 > 90 and intersection_percentage2 > 90: + overlapping_pairs.append((idx1, idx2)) + +idx1s = [] +idx2s = [] + +glob_1s = [] +glob_2s = [] + + +for idx1, idx2 in 
overlapping_pairs: + idx1s.append(idx1) + idx2s.append(idx2) + + glob_1s.append(gdf.iloc[idx1].globalid) + glob_2s.append(gdf.iloc[idx2].globalid) + + +df = pd.DataFrame() +df["idx1"] = idx1s +df["idx2"] = idx2s +df["globalid_1"] = glob_1s +df["globalid_2"] = glob_2s + +df.to_csv("../overlapping_Rijnland.csv") + + +print(df) + + +# ### Create list of duplicates for removal + + +numbers_to_remove = [] + +# Go loop unique index values +for number in df["idx1"].unique(): + if number in numbers_to_remove: + continue + + # Find all combinations + associated_idx2 = df[df["idx1"] == number]["idx2"].tolist() + # Append combinations + numbers_to_remove.extend(associated_idx2) + +# Remove duplicates using set operation +numbers_to_remove = list(set(numbers_to_remove)) + + +# ### Remove duplicates + + +Rijnland["peilgebied"] = Rijnland["peilgebied"][~Rijnland["peilgebied"].index.isin(numbers_to_remove)] +Rijnland["streefpeil"] = Rijnland["streefpeil"][~Rijnland["streefpeil"].index.isin(numbers_to_remove)] + + +if remove_cat_2: + Rijnland["peilgebied"] = Rijnland["peilgebied"].loc[Rijnland["peilgebied"].peilgebied_cat != 2] + + +# ### Store data + + +for key in Rijnland.keys(): + print(key) + Rijnland[str(key)].to_file(f"{output_folder}/{waterschap}.gpkg", layer=str(key), driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.ipynb deleted file mode 100644 index a9a37f8..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.ipynb +++ /dev/null @@ -1,536 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Wetterskip" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and make sure the peilgebieden neatly match the HWS layer (Daniel):\n", - "- 
peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> boezem\n", - "- peilgebied_cat = 2 -> HWS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Set Paths" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Wetterskip\"\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_wetterskip.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}/\"\n", - "# Dm netwerk\n", - "boezem_path = \"/DATAFOLDER/projects/4750_30/Data_overig/DM_netwerk/waterschapsgrenzen_boezem_netwerk.shp\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "## Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "Wetterskip = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " 
\"aggregation_area\",\n", - " ],\n", - ")\n", - "Wetterskip[\"peilgebied\"] = Wetterskip[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)\n", - "\n", - "gdf_rhws = gpd.read_file(boezem_path)" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# # Select boundaries HH Amstel, Gooi en Vecht\n", - "# gdf_grens = gdf_grens.loc[['Wetterskip Fryslân']]\n", - "\n", - "# # Use waterschap boudnaries to clip HWS layer\n", - "# gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how='intersection')" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "## Check Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. 
Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# # Step 1: Identify the Overlapping Areas and clip\n", - "# overlaps = gpd.overlay(Wetterskip['peilgebied'], gdf_hws, how='intersection', keep_geom_type=True)\n", - "\n", - "# # # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "# non_overlapping_peilgebied = gpd.overlay(Wetterskip['peilgebied'], overlaps, how='difference', keep_geom_type=True)\n", - "# overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how='intersection', keep_geom_type=False)\n", - "\n", - "# # Step 3: Calculate Area Percentages\n", - "# # Calculate the area of overlaps\n", - "# overlaps['overlap_area'] = overlaps.area\n", - "\n", - "# # Step 4: Filter based on area Area Percentages\n", - "# minimum_area = 500\n", - "# print(f'Number of overlapping shapes without filter: {len(overlaps)}')\n", - "# overlap_ids = overlaps.loc[overlaps['overlap_area'] > minimum_area]\n", - "# overlap_ids = overlap_ids.globalid.to_list()\n", - "# print(f'Number of overlapping shapes with filter: {len(overlap_ids)}')" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "## Create peilgebied_cat column" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# Add to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "for index, row in Wetterskip[\"streefpeil\"].iterrows():\n", - " if round(row.waterhoogte, 2) == -0.52:\n", - " peilgebieden_cat.append(1)\n", - " else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - "Wetterskip[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "## Add nhws to 
['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "Wetterskip[\"peilgebied\"] = pd.concat([gdf_hws, Wetterskip[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "# update streefpeil dict key\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "Wetterskip[\"streefpeil\"] = pd.concat([streefpeil_hws, Wetterskip[\"streefpeil\"]])\n", - "Wetterskip[\"streefpeil\"] = gpd.GeoDataFrame(Wetterskip[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "### Create buffer polygon between NHWS and peilgebied/RHWS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0])\n", - "# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist()))\n", - "# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(Wetterskip['peilgebied'].geometry.tolist()))\n", - "\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon = buffer_polygon.set_geometry(0)\n", - "# 
buffer_polygon = buffer_polygon.dissolve()\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon.set_geometry('geometry')\n", - "# buffer_polygon = buffer_polygon.set_crs('EPSG:28992')" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "### Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# Wetterskip['peilgebied'] = pd.concat([buffer_polygon, Wetterskip['peilgebied']])\n", - "# Wetterskip['peilgebied'] = gpd.GeoDataFrame(Wetterskip['peilgebied'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# Wetterskip['streefpeil'] = pd.concat([streefpeil_buffer, Wetterskip['streefpeil']])\n", - "# Wetterskip['streefpeil'] = gpd.GeoDataFrame(Wetterskip['streefpeil'])" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": {}, - "source": [ - 
"## Wetterskip data contains many duplicate peilgebieden" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": {}, - "source": [ - "### Calculate polygons that overlap with more than 90 % of their area" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# from itertools import combinations\n", - "# gdf = Wetterskip['peilgebied']\n", - "# # gdf = test\n", - "# gdf = gdf[6:]\n", - "\n", - "# # Initialize a list to store index pairs with more than 90% overlap\n", - "# overlapping_pairs = []\n", - "\n", - "# # Iterate through each unique pair of geometries\n", - "# for idx1, idx2 in combinations(gdf.index, 2):\n", - "# print(f'Processing {idx1} out of {len(gdf)}...', end='\\r')\n", - "# geom1 = gdf.at[idx1, 'geometry']\n", - "# geom2 = gdf.at[idx2, 'geometry']\n", - "\n", - "# # Calculate intersection\n", - "# intersection = geom1.intersection(geom2)\n", - "# intersection_area = intersection.area\n", - "\n", - "# # Calculate original areas\n", - "# area1 = geom1.area\n", - "# area2 = geom2.area\n", - "\n", - "# # Calculate intersection percentage for each geometry\n", - "# intersection_percentage1 = (intersection_area / area1) * 100\n", - "# intersection_percentage2 = (intersection_area / area2) * 100\n", - "\n", - "# # Check if both geometries overlap more than 90%\n", - "# if intersection_percentage1 > 90 and intersection_percentage2 > 90:\n", - "# # print((idx1, idx2))\n", - "# overlapping_pairs.append((idx1, idx2))\n", - "\n", - "# idx1s = []\n", - "# idx2s = []\n", - "\n", - "# glob_1s = []\n", - "# glob_2s = []\n", - "\n", - "\n", - "# for idx1, idx2 in overlapping_pairs:\n", - "\n", - "# idx1s.append(idx1)\n", - "# idx2s.append(idx2)\n", - "\n", - "# glob_1s.append(gdf.iloc[idx1].globalid)\n", - "# glob_2s.append(gdf.iloc[idx2].globalid)\n", - "\n", - "# df = pd.DataFrame()\n", - "# df['idx1'] = idx1s\n", - "# df['idx2'] = idx2s\n", - "# df['globalid_1'] 
= glob_1s\n", - "# df['globalid_2'] = glob_2s\n", - "\n", - "# df.to_csv('./overlapping_wetterskip.csv')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "# df = pd.read_csv('./overlapping_wetterskip.csv')" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": {}, - "source": [ - "### Create list of duplicates for removal" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "# numbers_to_remove = []\n", - "\n", - "# # Go loop unique index values\n", - "# for number in df['idx1'].unique():\n", - "# if number in numbers_to_remove:\n", - "# continue\n", - "\n", - "# # Find all combinations\n", - "# associated_idx2 = df[df['idx1'] == number]['idx2'].tolist()\n", - "# # Append combinations\n", - "# numbers_to_remove.extend(associated_idx2)\n", - "\n", - "# # Remove duplicates using set operation\n", - "# numbers_to_remove = list(set(numbers_to_remove))" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": {}, - "source": [ - "### Remove duplicates" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [ - "# Wetterskip['peilgebied'] = Wetterskip['peilgebied'][~Wetterskip['peilgebied'].index.isin(numbers_to_remove)]\n", - "# Wetterskip['streefpeil'] = Wetterskip['streefpeil'][~Wetterskip['streefpeil'].index.isin(numbers_to_remove)]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "Wetterskip[\"streefpeil\"][\"globalid\"].is_unique" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " Wetterskip[\"peilgebied\"] = Wetterskip[\"peilgebied\"].loc[Wetterskip[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "32", 
- "metadata": {}, - "source": [ - "## Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "for key in Wetterskip.keys():\n", - " print(key)\n", - " Wetterskip[str(key)].to_file(f\"{output_folder}/{waterschap}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py new file mode 100644 index 0000000..4eab36f --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py @@ -0,0 +1,285 @@ +# # Wetterskip + +# This script adds a new column "peilgebied_cat" and make sure the peilgebieden neatly match the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> boezem +# - peilgebied_cat = 2 -> HWS + + +import geopandas as gpd +import numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## Set Paths + + +# define relative paths +waterschap = "Wetterskip" +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = 
"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_wetterskip.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}/" +# Dm netwerk +boezem_path = "/DATAFOLDER/projects/4750_30/Data_overig/DM_netwerk/waterschapsgrenzen_boezem_netwerk.shp" + + +# ## Load Files + + +# Load HHNK files +Wetterskip = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +Wetterskip["peilgebied"] = Wetterskip["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) + +gdf_rhws = gpd.read_file(boezem_path) + + +# ## Select waterschap boundaries and clip hws layer + + +# # Select boundaries HH Amstel, Gooi en Vecht +# gdf_grens = gdf_grens.loc[['Wetterskip Fryslân']] + +# # Use waterschap boudnaries to clip HWS layer +# gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how='intersection') + + +# ## Check Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. 
Filter + + +# # Step 1: Identify the Overlapping Areas and clip +# overlaps = gpd.overlay(Wetterskip['peilgebied'], gdf_hws, how='intersection', keep_geom_type=True) + +# # # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +# non_overlapping_peilgebied = gpd.overlay(Wetterskip['peilgebied'], overlaps, how='difference', keep_geom_type=True) +# overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how='intersection', keep_geom_type=False) + +# # Step 3: Calculate Area Percentages +# # Calculate the area of overlaps +# overlaps['overlap_area'] = overlaps.area + +# # Step 4: Filter based on area Area Percentages +# minimum_area = 500 +# print(f'Number of overlapping shapes without filter: {len(overlaps)}') +# overlap_ids = overlaps.loc[overlaps['overlap_area'] > minimum_area] +# overlap_ids = overlap_ids.globalid.to_list() +# print(f'Number of overlapping shapes with filter: {len(overlap_ids)}') + + +# ## Create peilgebied_cat column + + +# Add to geodataframe +peilgebieden_cat = [] + +for index, row in Wetterskip["streefpeil"].iterrows(): + if round(row.waterhoogte, 2) == -0.52: + peilgebieden_cat.append(1) + else: + peilgebieden_cat.append(0) + +Wetterskip["peilgebied"]["peilgebied_cat"] = peilgebieden_cat + + +# ## Add nhws to ['peilgebied','streefpeil'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +Wetterskip["peilgebied"] = pd.concat([gdf_hws, Wetterskip["peilgebied"]]) + + +# update streefpeil dict key +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * 
len(gdf_hws) + +Wetterskip["streefpeil"] = pd.concat([streefpeil_hws, Wetterskip["streefpeil"]]) +Wetterskip["streefpeil"] = gpd.GeoDataFrame(Wetterskip["streefpeil"]) + + +# ### Create buffer polygon between NHWS and peilgebied/RHWS + + +# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0]) +# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist())) +# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(Wetterskip['peilgebied'].geometry.tolist())) + +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon = buffer_polygon.set_geometry(0) +# buffer_polygon = buffer_polygon.dissolve() +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon.set_geometry('geometry') +# buffer_polygon = buffer_polygon.set_crs('EPSG:28992') + + +# ### Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# Wetterskip['peilgebied'] = pd.concat([buffer_polygon, Wetterskip['peilgebied']]) +# Wetterskip['peilgebied'] = gpd.GeoDataFrame(Wetterskip['peilgebied']) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# streefpeil_buffer['geometry'] = [None] + + +# Wetterskip['streefpeil'] = 
pd.concat([streefpeil_buffer, Wetterskip['streefpeil']]) +# Wetterskip['streefpeil'] = gpd.GeoDataFrame(Wetterskip['streefpeil']) + + +# ## Wetterskip data contains many duplicate peilgebieden + +# ### Calculate polygons that overlap with more than 90 % of their area + + +# from itertools import combinations +# gdf = Wetterskip['peilgebied'] +# # gdf = test +# gdf = gdf[6:] + +# # Initialize a list to store index pairs with more than 90% overlap +# overlapping_pairs = [] + +# # Iterate through each unique pair of geometries +# for idx1, idx2 in combinations(gdf.index, 2): +# print(f'Processing {idx1} out of {len(gdf)}...', end='\r') +# geom1 = gdf.at[idx1, 'geometry'] +# geom2 = gdf.at[idx2, 'geometry'] + +# # Calculate intersection +# intersection = geom1.intersection(geom2) +# intersection_area = intersection.area + +# # Calculate original areas +# area1 = geom1.area +# area2 = geom2.area + +# # Calculate intersection percentage for each geometry +# intersection_percentage1 = (intersection_area / area1) * 100 +# intersection_percentage2 = (intersection_area / area2) * 100 + +# # Check if both geometries overlap more than 90% +# if intersection_percentage1 > 90 and intersection_percentage2 > 90: +# # print((idx1, idx2)) +# overlapping_pairs.append((idx1, idx2)) + +# idx1s = [] +# idx2s = [] + +# glob_1s = [] +# glob_2s = [] + + +# for idx1, idx2 in overlapping_pairs: + +# idx1s.append(idx1) +# idx2s.append(idx2) + +# glob_1s.append(gdf.iloc[idx1].globalid) +# glob_2s.append(gdf.iloc[idx2].globalid) + +# df = pd.DataFrame() +# df['idx1'] = idx1s +# df['idx2'] = idx2s +# df['globalid_1'] = glob_1s +# df['globalid_2'] = glob_2s + +# df.to_csv('./overlapping_wetterskip.csv') + + +# df = pd.read_csv('./overlapping_wetterskip.csv') + + +# ### Create list of duplicates for removal + + +# numbers_to_remove = [] + +# # Go loop unique index values +# for number in df['idx1'].unique(): +# if number in numbers_to_remove: +# continue + +# # Find all combinations +# 
associated_idx2 = df[df['idx1'] == number]['idx2'].tolist() +# # Append combinations +# numbers_to_remove.extend(associated_idx2) + +# # Remove duplicates using set operation +# numbers_to_remove = list(set(numbers_to_remove)) + + +# ### Remove duplicates + + +# Wetterskip['peilgebied'] = Wetterskip['peilgebied'][~Wetterskip['peilgebied'].index.isin(numbers_to_remove)] +# Wetterskip['streefpeil'] = Wetterskip['streefpeil'][~Wetterskip['streefpeil'].index.isin(numbers_to_remove)] + + +Wetterskip["streefpeil"]["globalid"].is_unique + + +if remove_cat_2: + Wetterskip["peilgebied"] = Wetterskip["peilgebied"].loc[Wetterskip["peilgebied"].peilgebied_cat != 2] + + +# ## Store data + + +for key in Wetterskip.keys(): + print(key) + Wetterskip[str(key)].to_file(f"{output_folder}/{waterschap}.gpkg", layer=str(key), driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.ipynb deleted file mode 100644 index 7625396..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.ipynb +++ /dev/null @@ -1,440 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "tags": [] - }, - "source": [ - "# Zuiderzeeland" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer:\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": 
"markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Zuiderzeeland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Zuiderzeeland\"\n", - "waterschap2 = \"Zuiderzeeland\"\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_zuiderzeeland.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "### Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "Zuiderzeeland = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "Zuiderzeeland[\"peilgebied\"] = Zuiderzeeland[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - 
"metadata": {}, - "outputs": [], - "source": [ - "Zuiderzeeland[\"peilgebied\"].globalid.is_unique" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"Waterschap Zuiderzeeland\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "Zuiderzeeland[\"peilgebied\"]" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "## Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. 
Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(Zuiderzeeland[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(Zuiderzeeland[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 100\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "Zuiderzeeland[\"peilgebied\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "for index, row in Zuiderzeeland[\"peilgebied\"].iterrows():\n", - " if \"LVA.01\" in row.code:\n", - " peilgebieden_cat.append(1)\n", - " print(\"yes\")\n", - " elif \"3.01\" in row.code:\n", - " peilgebieden_cat.append(1)\n", - " print(\"yes\")\n", - " elif \"LAGE AFDELING\" in row.code:\n", - " print(\"yes\")\n", - " peilgebieden_cat.append(1)\n", - "\n", - " elif \"HOGE AFDELING\" in row.code:\n", - " print(\"yes\")\n", - " peilgebieden_cat.append(1)\n", - "\n", - " 
else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - "\n", - "# Add new column and drop old HWS_BZM column\n", - "Zuiderzeeland[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat\n", - "# Zuiderzeeland['peilgebied'] = Zuiderzeeland['peilgebied'].drop(columns=['HWS_BZM'])" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "## Add nhws to ['peilgebied']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "Zuiderzeeland[\"peilgebied\"] = pd.concat([gdf_hws, Zuiderzeeland[\"peilgebied\"]])" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "## Add HWS to ['streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "Zuiderzeeland[\"streefpeil\"] = pd.concat([streefpeil_hws, Zuiderzeeland[\"streefpeil\"]])\n", - "Zuiderzeeland[\"streefpeil\"] = gpd.GeoDataFrame(Zuiderzeeland[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0])\n", - "# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist()))\n", - "# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(Zuiderzeeland['peilgebied'].geometry.tolist()))\n", - "\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon = buffer_polygon.set_geometry(0)\n", - "# buffer_polygon = buffer_polygon.dissolve()\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon.set_geometry('geometry')\n", - "# buffer_polygon = buffer_polygon.set_crs('EPSG:28992')" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": { - "tags": [] - }, - "source": [ - "## Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# Zuiderzeeland['peilgebied'] = pd.concat([buffer_polygon, Zuiderzeeland['peilgebied']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = 
['dummy_globalid_nhws_buffer_1']\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# Zuiderzeeland['streefpeil'] = pd.concat([streefpeil_buffer, Zuiderzeeland['streefpeil']])\n", - "# Zuiderzeeland['streefpeil'] = gpd.GeoDataFrame(Zuiderzeeland['streefpeil'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " Zuiderzeeland[\"peilgebied\"] = Zuiderzeeland[\"peilgebied\"].loc[Zuiderzeeland[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "## Store output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "Zuiderzeeland[\"peilgebied\"].globalid.is_unique" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "for key in Zuiderzeeland.keys():\n", - " print(key)\n", - " Zuiderzeeland[str(key)].to_file(f\"{output_folder}/{waterschap2}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py 
b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py new file mode 100644 index 0000000..cc3577d --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py @@ -0,0 +1,215 @@ +# # Zuiderzeeland + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer: +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS + + +import geopandas as gpd +import numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## Zuiderzeeland + + +# define relative paths +waterschap = "Zuiderzeeland" +waterschap2 = "Zuiderzeeland" +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_zuiderzeeland.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}" + + +# ### Load Files + + +# Load HHNK files +Zuiderzeeland = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +Zuiderzeeland["peilgebied"] = Zuiderzeeland["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) + + +Zuiderzeeland["peilgebied"].globalid.is_unique + + +# ## Select waterschap boundaries and clip hws 
layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[["Waterschap Zuiderzeeland"]] + +# Use waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +Zuiderzeeland["peilgebied"] + + +# ## Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. Filter + + +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(Zuiderzeeland["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(Zuiderzeeland["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 100 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + + +Zuiderzeeland["peilgebied"] + + +# Add occurence to geodataframe +peilgebieden_cat = [] + +for index, row in Zuiderzeeland["peilgebied"].iterrows(): + if "LVA.01" in row.code: + peilgebieden_cat.append(1) + print("yes") + elif "3.01" in row.code: + peilgebieden_cat.append(1) + print("yes") + elif "LAGE AFDELING" in row.code: + print("yes") + peilgebieden_cat.append(1) + + elif "HOGE AFDELING" in row.code: + print("yes") + peilgebieden_cat.append(1) + + else: + peilgebieden_cat.append(0) + + +# Add new column and drop old HWS_BZM column +Zuiderzeeland["peilgebied"]["peilgebied_cat"] = peilgebieden_cat +# Zuiderzeeland['peilgebied'] = 
Zuiderzeeland['peilgebied'].drop(columns=['HWS_BZM']) + + +# ## Add nhws to ['peilgebied'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +Zuiderzeeland["peilgebied"] = pd.concat([gdf_hws, Zuiderzeeland["peilgebied"]]) + + +# ## Add HWS to ['streefpeil'] + + +# Create boezem streefpeil layer +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +Zuiderzeeland["streefpeil"] = pd.concat([streefpeil_hws, Zuiderzeeland["streefpeil"]]) +Zuiderzeeland["streefpeil"] = gpd.GeoDataFrame(Zuiderzeeland["streefpeil"]) + + +# ### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer + + +# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0]) +# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist())) +# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(Zuiderzeeland['peilgebied'].geometry.tolist())) + +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon = buffer_polygon.set_geometry(0) +# buffer_polygon = buffer_polygon.dissolve() +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon.set_geometry('geometry') +# buffer_polygon = buffer_polygon.set_crs('EPSG:28992') + + +# ## Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 
'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 + +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# Zuiderzeeland['peilgebied'] = pd.concat([buffer_polygon, Zuiderzeeland['peilgebied']]) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1'] +# streefpeil_buffer['geometry'] = [None] + + +# Zuiderzeeland['streefpeil'] = pd.concat([streefpeil_buffer, Zuiderzeeland['streefpeil']]) +# Zuiderzeeland['streefpeil'] = gpd.GeoDataFrame(Zuiderzeeland['streefpeil']) + + +if remove_cat_2: + Zuiderzeeland["peilgebied"] = Zuiderzeeland["peilgebied"].loc[Zuiderzeeland["peilgebied"].peilgebied_cat != 2] + + +# ## Store output + + +Zuiderzeeland["peilgebied"].globalid.is_unique + + +for key in Zuiderzeeland.keys(): + print(key) + Zuiderzeeland[str(key)].to_file(f"{output_folder}/{waterschap2}.gpkg", layer=str(key), driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.ipynb deleted file mode 100644 index edcae23..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.ipynb +++ /dev/null @@ -1,447 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Hollandse Delta" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS " - ] - }, - { - "cell_type": 
"code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Delfland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Hollandse_Delta\"\n", - "waterschap2 = \"HD\"\n", - "\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap2}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_hd.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "### Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "HD = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "HD[\"peilgebied\"] = HD[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens 
= gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)\n", - "gdf_buffer = gdf_buffer.to_crs(\"EPSG:28992\")\n", - "gdf_buffer = gdf_buffer.dissolve()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "HD[\"peilgebied\"].globalid.is_unique" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"Waterschap Hollandse Delta\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "## Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. 
Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(HD[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(HD[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 500\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "## Create peilgebied_cat column" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "for index, row in HD[\"peilgebied\"].iterrows():\n", - " if row.code == \"Zuiderdiepboezem_164\":\n", - " peilgebieden_cat.append(1)\n", - " elif row.code == \"Zuiderdiepboezem_163\":\n", - " peilgebieden_cat.append(1)\n", - " elif row.code == \"Zoetwaterboezem_571\":\n", - " peilgebieden_cat.append(1)\n", - " elif row.code == \"Kanaal door Voorne_570\":\n", - " peilgebieden_cat.append(1)\n", - " elif row.code == \"Binnenbedijkte Maas_290\":\n", - " peilgebieden_cat.append(1)\n", - " elif row.code == \"Boezemloozende door 
Strijensas_333\":\n", - " peilgebieden_cat.append(1)\n", - " # elif row.code == 'Zuiderdiepboezem':\n", - " # peilgebieden_cat.append(1)\n", - " elif row.code == \"Kreekkade_660\":\n", - " peilgebieden_cat.append(1)\n", - " elif row.code == \"Zwijndrechtse Waard_703\":\n", - " peilgebieden_cat.append(1)\n", - " else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - " # if row.code == 'Zuiderdiepboezem en havenkanaal Dirksland':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Havenkanaal van Goedereede':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Zoetwaterboezem':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Kanaal door Voorne':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Binnenbedijkte Maas':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Boezemloozende door Strijensas':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Zuiderdiepboezem':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Kreekkade':\n", - " # peilgebieden_cat.append(1)\n", - " # elif row.code == 'Zwijndrechtse Waard':\n", - " # peilgebieden_cat.append(1)\n", - " # else:\n", - " # peilgebieden_cat.append(0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# Add new column\n", - "HD[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "HD[\"peilgebied\"][\"peilgebied_cat\"].unique()" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "## Add nhws to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - 
"gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "HD[\"peilgebied\"] = pd.concat([gdf_hws, HD[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "HD[\"streefpeil\"] = pd.concat([streefpeil_hws, HD[\"streefpeil\"]])\n", - "HD[\"streefpeil\"] = gpd.GeoDataFrame(HD[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create buffer polygon\n", - "# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True)\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True)\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, HD['peilgebied'], how='difference', keep_geom_type=True)" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "## Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + 
buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# HD['peilgebied'] = pd.concat([buffer_polygon, HD['peilgebied']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1']\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# HD['streefpeil'] = pd.concat([streefpeil_buffer, HD['streefpeil']])\n", - "# HD['streefpeil'] = gpd.GeoDataFrame(HD['streefpeil'])\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " HD[\"peilgebied\"] = HD[\"peilgebied\"].loc[HD[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "## Store output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "for key in HD.keys():\n", - " print(key)\n", - " HD[str(key)].to_file(f\"{output_folder}/{waterschap2}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": 
"python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py new file mode 100644 index 0000000..ec46b06 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py @@ -0,0 +1,230 @@ +# # Hollandse Delta + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS + + +import geopandas as gpd +import numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## Delfland + + +# define relative paths +waterschap = "Hollandse_Delta" +waterschap2 = "HD" + +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap2}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_hd.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}" + + +# ### Load Files + + +# Load HHNK files +HD = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +HD["peilgebied"] = 
HD["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) +gdf_buffer = gdf_buffer.to_crs("EPSG:28992") +gdf_buffer = gdf_buffer.dissolve() + + +HD["peilgebied"].globalid.is_unique + + +# ## Select waterschap boundaries and clip hws layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[["Waterschap Hollandse Delta"]] + +# Use waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +# ## Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. Filter + + +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(HD["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(HD["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 500 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + + +# ## Create peilgebied_cat column + + +# Add occurence to geodataframe +peilgebieden_cat = [] + +for index, row in HD["peilgebied"].iterrows(): + if row.code == "Zuiderdiepboezem_164": + peilgebieden_cat.append(1) + elif row.code == 
"Zuiderdiepboezem_163": + peilgebieden_cat.append(1) + elif row.code == "Zoetwaterboezem_571": + peilgebieden_cat.append(1) + elif row.code == "Kanaal door Voorne_570": + peilgebieden_cat.append(1) + elif row.code == "Binnenbedijkte Maas_290": + peilgebieden_cat.append(1) + elif row.code == "Boezemloozende door Strijensas_333": + peilgebieden_cat.append(1) + # elif row.code == 'Zuiderdiepboezem': + # peilgebieden_cat.append(1) + elif row.code == "Kreekkade_660": + peilgebieden_cat.append(1) + elif row.code == "Zwijndrechtse Waard_703": + peilgebieden_cat.append(1) + else: + peilgebieden_cat.append(0) + + # if row.code == 'Zuiderdiepboezem en havenkanaal Dirksland': + # peilgebieden_cat.append(1) + # elif row.code == 'Havenkanaal van Goedereede': + # peilgebieden_cat.append(1) + # elif row.code == 'Zoetwaterboezem': + # peilgebieden_cat.append(1) + # elif row.code == 'Kanaal door Voorne': + # peilgebieden_cat.append(1) + # elif row.code == 'Binnenbedijkte Maas': + # peilgebieden_cat.append(1) + # elif row.code == 'Boezemloozende door Strijensas': + # peilgebieden_cat.append(1) + # elif row.code == 'Zuiderdiepboezem': + # peilgebieden_cat.append(1) + # elif row.code == 'Kreekkade': + # peilgebieden_cat.append(1) + # elif row.code == 'Zwijndrechtse Waard': + # peilgebieden_cat.append(1) + # else: + # peilgebieden_cat.append(0) + + +# Add new column +HD["peilgebied"]["peilgebied_cat"] = peilgebieden_cat + + +HD["peilgebied"]["peilgebied_cat"].unique() + + +# ## Add nhws to ['peilgebied','streefpeil'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +HD["peilgebied"] = pd.concat([gdf_hws, HD["peilgebied"]]) + + +# Create boezem streefpeil layer 
+streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +HD["streefpeil"] = pd.concat([streefpeil_hws, HD["streefpeil"]]) +HD["streefpeil"] = gpd.GeoDataFrame(HD["streefpeil"]) + + +# ### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer + + +# # Create buffer polygon +# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True) +# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True) +# buffer_polygon = gpd.overlay(buffer_polygon, HD['peilgebied'], how='difference', keep_geom_type=True) + + +# ## Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 + +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# HD['peilgebied'] = pd.concat([buffer_polygon, HD['peilgebied']]) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1'] +# streefpeil_buffer['geometry'] = [None] + + +# HD['streefpeil'] = pd.concat([streefpeil_buffer, HD['streefpeil']]) +# HD['streefpeil'] = gpd.GeoDataFrame(HD['streefpeil']) + + +if remove_cat_2: + HD["peilgebied"] = HD["peilgebied"].loc[HD["peilgebied"].peilgebied_cat != 2] + + +# ## Store output + + +for key in HD.keys(): + print(key) + HD[str(key)].to_file(f"{output_folder}/{waterschap2}.gpkg", layer=str(key), 
driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.ipynb deleted file mode 100644 index 8fdc1ba..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.ipynb +++ /dev/null @@ -1,456 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# HHNK" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Set Paths" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"HHNK\"\n", - "waterschap2 = \"Noorderkwartier\"\n", - "\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap2}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_hhnk.gpkg\"\n", - "# Output folder\n", - 
"output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "## Load files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "HHNK = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "HHNK[\"peilgebied\"] = HHNK[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"HH Hollands Noorderkwartier\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "## Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. 
Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# Plot\n", - "fig, ax = plt.subplots()\n", - "\n", - "HHNK[\"peilgebied\"].plot(ax=ax)\n", - "gdf_grens.plot(ax=ax, color=\"red\", alpha=0.5)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "HHNK[\"peilgebied\"].geometry.type.unique()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "HHNK[\"peilgebied\"].geometry = HHNK[\"peilgebied\"].buffer(0)\n", - "gdf_grens.geometry = gdf_grens.buffer(0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip.\n", - "HHNK[\"peilgebied\"] = gpd.overlay(HHNK[\"peilgebied\"], gdf_grens, how=\"intersection\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(HHNK[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(HHNK[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 20000\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "markdown", - "id": 
"14", - "metadata": {}, - "source": [ - "## Create peilgebied_cat column" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "# code_list = [\"dummy_code_5188\",\"dummy_code_5161\",\"dummy_code_5210\",\"dummy_code_4352\",\"dummy_code_5164\",\"dummy_code_5200\",\"dummy_code_5167\",\"dummy_code_37\"]\n", - "\n", - "\n", - "for index, row in HHNK[\"peilgebied\"].iterrows():\n", - " if row.HWS_BZM:\n", - " print(\"yes\")\n", - " peilgebieden_cat.append(1)\n", - "\n", - " # elif row.HWS_BZM is True:\n", - " # peilgebieden_cat.append(1)\n", - "\n", - " else:\n", - " peilgebieden_cat.append(0)\n", - "\n", - "# Add new column and drop old HWS_BZM column\n", - "HHNK[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat\n", - "# HHNK['peilgebied'] = HHNK['peilgebied'].drop(columns=['HWS_BZM'])" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "## Add nhws to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "HHNK[\"peilgebied\"] = pd.concat([gdf_hws, HHNK[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * 
len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - "HHNK[\"streefpeil\"] = pd.concat([streefpeil_hws, HHNK[\"streefpeil\"]])\n", - "HHNK[\"streefpeil\"] = gpd.GeoDataFrame(HHNK[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "### Create layer that ensures spatial match between peilgebied and hws layers based on the buffer layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create buffer polygon\n", - "# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True)\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True)\n", - "\n", - "\n", - "# # Apply a small buffer to both GeoDataFrames to \"clean\" the geometries\n", - "# buffer_polygon.geometry = buffer_polygon.buffer(0.001).buffer(-0.001)\n", - "# HHNK['peilgebied'].geometry = HHNK['peilgebied'].buffer(0.001).buffer(-0.001)\n", - "\n", - "# # Try the overlay operation again\n", - "# try:\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, HHNK['peilgebied'], how='difference', keep_geom_type=True)\n", - "# print(\"Overlay operation successful.\")\n", - "# except Exception as e:\n", - "# print(f\"Overlay operation failed: {e}\")" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "## Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 
'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# HHNK['peilgebied'] = pd.concat([buffer_polygon, HHNK['peilgebied']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1']\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# HHNK['streefpeil'] = pd.concat([streefpeil_buffer, HHNK['streefpeil']])\n", - "# HHNK['streefpeil'] = gpd.GeoDataFrame(HHNK['streefpeil'])\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "HHNK[\"peilgebied\"] = HHNK[\"peilgebied\"][[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " HHNK[\"peilgebied\"] = HHNK[\"peilgebied\"].loc[HHNK[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": {}, - "source": [ - "## Write output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "for key in HHNK.keys():\n", - " print(key)\n", - " HHNK[str(key)].to_file(f\"{output_folder}/{waterschap2}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "HHNK[\"peilgebied\"][\"peilgebied_cat\"].unique()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"29", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py new file mode 100644 index 0000000..29eecc7 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py @@ -0,0 +1,225 @@ +# # HHNK + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS + + +import geopandas as gpd +import numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## Set Paths + + +# define relative paths +waterschap = "HHNK" +waterschap2 = "Noorderkwartier" + +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap2}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_hhnk.gpkg" +# Output folder +output_folder = 
f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}" + + +# ## Load files + + +# Load HHNK files +HHNK = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +HHNK["peilgebied"] = HHNK["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) + + +# ## Select waterschap boundaries and clip hws layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[["HH Hollands Noorderkwartier"]] + +# Use waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +# ## Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. Filter + + +# Plot +fig, ax = plt.subplots() + +HHNK["peilgebied"].plot(ax=ax) +gdf_grens.plot(ax=ax, color="red", alpha=0.5) + + +HHNK["peilgebied"].geometry.type.unique() + + +HHNK["peilgebied"].geometry = HHNK["peilgebied"].buffer(0) +gdf_grens.geometry = gdf_grens.buffer(0) + + +# Step 1: Identify the Overlapping Areas and clip. 
+HHNK["peilgebied"] = gpd.overlay(HHNK["peilgebied"], gdf_grens, how="intersection", keep_geom_type=True) +overlaps = gpd.overlay(HHNK["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(HHNK["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 20000 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + + +# ## Create peilgebied_cat column + + +# Add occurence to geodataframe +peilgebieden_cat = [] + +# code_list = ["dummy_code_5188","dummy_code_5161","dummy_code_5210","dummy_code_4352","dummy_code_5164","dummy_code_5200","dummy_code_5167","dummy_code_37"] + + +for index, row in HHNK["peilgebied"].iterrows(): + if row.HWS_BZM: + print("yes") + peilgebieden_cat.append(1) + + # elif row.HWS_BZM is True: + # peilgebieden_cat.append(1) + + else: + peilgebieden_cat.append(0) + +# Add new column and drop old HWS_BZM column +HHNK["peilgebied"]["peilgebied_cat"] = peilgebieden_cat +# HHNK['peilgebied'] = HHNK['peilgebied'].drop(columns=['HWS_BZM']) + + +# ## Add nhws to ['peilgebied','streefpeil'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", 
"peilgebied_cat", "geometry"]] + +HHNK["peilgebied"] = pd.concat([gdf_hws, HHNK["peilgebied"]]) + + +# Create boezem streefpeil layer +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +HHNK["streefpeil"] = pd.concat([streefpeil_hws, HHNK["streefpeil"]]) +HHNK["streefpeil"] = gpd.GeoDataFrame(HHNK["streefpeil"]) + + +# ### Create layer that ensures spatial match between peilgebied and hws layers based on the buffer layer + + +# # Create buffer polygon +# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True) +# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True) + + +# # Apply a small buffer to both GeoDataFrames to "clean" the geometries +# buffer_polygon.geometry = buffer_polygon.buffer(0.001).buffer(-0.001) +# HHNK['peilgebied'].geometry = HHNK['peilgebied'].buffer(0.001).buffer(-0.001) + +# # Try the overlay operation again +# try: +# buffer_polygon = gpd.overlay(buffer_polygon, HHNK['peilgebied'], how='difference', keep_geom_type=True) +# print("Overlay operation successful.") +# except Exception as e: +# print(f"Overlay operation failed: {e}") + + +# ## Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 + +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# HHNK['peilgebied'] = pd.concat([buffer_polygon, HHNK['peilgebied']]) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# 
streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1'] +# streefpeil_buffer['geometry'] = [None] + + +# HHNK['streefpeil'] = pd.concat([streefpeil_buffer, HHNK['streefpeil']]) +# HHNK['streefpeil'] = gpd.GeoDataFrame(HHNK['streefpeil']) + + +HHNK["peilgebied"] = HHNK["peilgebied"][["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + + +if remove_cat_2: + HHNK["peilgebied"] = HHNK["peilgebied"].loc[HHNK["peilgebied"].peilgebied_cat != 2] + + +# ## Write output + + +for key in HHNK.keys(): + print(key) + HHNK[str(key)].to_file(f"{output_folder}/{waterschap2}.gpkg", layer=str(key), driver="GPKG") + + +HHNK["peilgebied"]["peilgebied_cat"].unique() diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.ipynb deleted file mode 100644 index e48b119..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.ipynb +++ /dev/null @@ -1,508 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# HHSK" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and make sure the peilgebieden allign witgh the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS Notes:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import shapely\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## HHSK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"HHSK\"\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer boundaries\n", - "buffer_path = r\"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_HHSK.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "## Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "HHSK = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "HHSK[\"peilgebied\"] = HHSK[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)\n", - "gdf_buffer = gdf_buffer.to_crs(\"EPSG:28992\")\n", - "gdf_buffer = gdf_buffer.dissolve()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "print(len(HHSK[\"duikersifonhevel\"].globalid.unique()))\n", - 
"print(len(HHSK[\"duikersifonhevel\"].globalid))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"peilgebied\"].globalid.is_unique" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "len(HHSK[\"hydroobject\"])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# HHSK['hydroobject'] = HHSK['hydroobject'].explode(ignore_index=False, index_parts=True)\n", - "HHSK[\"hydroobject\"][\"geometry\"] = HHSK[\"hydroobject\"].make_valid()\n", - "HHSK[\"hydroobject\"][\"geometry\"] = HHSK[\"hydroobject\"].geometry.apply(shapely.force_2d)\n", - "HHSK[\"hydroobject\"] = HHSK[\"hydroobject\"][~HHSK[\"hydroobject\"].is_empty].copy()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"hydroobject\"] = HHSK[\"hydroobject\"].drop_duplicates(subset=\"geometry\", keep=\"first\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "len(HHSK[\"hydroobject\"])" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = gdf_grens.loc[[\"Schieland en de Krimpenerwaard\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "## Check Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. 
Calculate overlapping area percentage\n", - "4. Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(HHSK[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "gdf_hws = gpd.overlay(gdf_hws, HHSK[\"peilgebied\"], how=\"difference\")\n", - "\n", - "# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(HHSK[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 50\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")\n", - "\n", - "# gdf_hws = gdf_hws_clipped" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "## Create peilgebied_cat column" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# list(HHSK['peilgebied'][HHSK['peilgebied'].code.str.contains('boezem')].code.unique())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "# # Add to geodataframe\n", - "# peilgebieden_cat = []\n", - "\n", - "\n", - "# # code_list = 
[\"dummy_id_78_dummy_id_78\",\"PPG-48_dummy_id_196_dummy_id_196\",\"PPG-49_dummy_id_85_dummy_id_85\",\"PPG-237_dummy_id_148_dummy_id_148\",\"PPG-1040_dummy_id_125_dummy_id_125\"]\n", - "# # code_list = [\"dummy_code_peilgebied_486\",\"dummy_code_peilgebied_450\",\"dummy_code_peilgebied_906\",\"dummy_code_peilgebied_1060\",\"dummy_code_peilgebied_552\",\"dummy_code_peilgebied_953\",\n", - "# # \"dummy_code_peilgebied_216\",\"dummy_code_peilgebied_544\",\"dummy_code_peilgebied_5\",\"dummy_code_peilgebied_480\",\"dummy_code_peilgebied_308\",\"dummy_code_peilgebied_677\",\n", - "# # \"dummy_code_peilgebied_1053\"]\n", - "\n", - "# code_list = list(HHSK['peilgebied'][HHSK['peilgebied'].code.str.contains('boezem')].code.unique())\n", - "\n", - "# for index, row in HHSK['peilgebied'].iterrows():\n", - "# # print(row.code)\n", - "# # if row.code in code_list:\n", - "# if 'boezem' in row.code:\n", - "# print('appending_boezem')\n", - "# peilgebieden_cat.append(1)\n", - "\n", - "# else:\n", - "# peilgebieden_cat.append(0)\n", - "\n", - "# HHSK['peilgebied']['peilgebied_cat'] = peilgebieden_cat" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"peilgebied\"][\"peilgebied_cat\"] = 0" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"peilgebied\"].loc[HHSK[\"peilgebied\"].code.str.contains(\"GPG-399\"), \"peilgebied_cat\"] = 1\n", - "HHSK[\"peilgebied\"].loc[HHSK[\"peilgebied\"].code.str.contains(\"GPG-1005\"), \"peilgebied_cat\"] = 1\n", - "HHSK[\"peilgebied\"].loc[HHSK[\"peilgebied\"].code.str.contains(\"GPG-1360\"), \"peilgebied_cat\"] = 1\n", - "HHSK[\"peilgebied\"].loc[HHSK[\"peilgebied\"].code.str.contains(\"GPG-1012\"), \"peilgebied_cat\"] = 1" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": { - "tags": [] - }, - "source": [ - "## Add nhws to ['peilgebied','streefpeil']" - ] - }, - 
{ - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# gdf_hws['globalid'] = 'dummy_globalid_nhws_' + gdf_hws.index.astype(str)\n", - "# gdf_hws['code'] = 'dummy_code_nhws_' + gdf_hws.index.astype(str)\n", - "# gdf_hws['nen3610id'] = 'dummy_nen3610id_nhws_' + gdf_hws.index.astype(str)\n", - "# gdf_hws['peilgebied_cat'] = 2\n", - "\n", - "# gdf_hws = gdf_hws[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# HHSK['peilgebied'] = pd.concat([gdf_hws, HHSK['peilgebied']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "# # update streefpeil dict key\n", - "# streefpeil_hws = pd.DataFrame()\n", - "# streefpeil_hws['waterhoogte'] = [np.nan] * len(gdf_hws)\n", - "# streefpeil_hws['globalid'] = 'dummy_globalid_nhws_' + gdf_hws.index.astype(str)\n", - "# streefpeil_hws['geometry'] = [None]* len(gdf_hws)\n", - "\n", - "# HHSK['streefpeil'] = pd.concat([streefpeil_hws, HHSK['streefpeil']])\n", - "# HHSK['streefpeil'] = gpd.GeoDataFrame(HHSK['streefpeil'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"peilgebied\"][\"peilgebied_cat\"].unique()" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": {}, - "source": [ - "### Create buffer polygon between NHWS and peilgebied/RHWS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0])\n", - "# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist()))\n", - "# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(HHSK['peilgebied'].geometry.tolist()))\n", - "\n", - "# buffer_polygon = 
gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon = buffer_polygon.set_geometry(0)\n", - "# buffer_polygon = buffer_polygon.dissolve()\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon.set_geometry('geometry')\n", - "# buffer_polygon = buffer_polygon.set_crs('EPSG:28992')" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": {}, - "source": [ - "### Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon = gpd.GeoDataFrame(buffer_polygon)\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + '1'\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'})\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# HHSK['peilgebied'] = pd.concat([buffer_polygon, HHSK['peilgebied']])\n", - "# HHSK['peilgebied'] = gpd.GeoDataFrame(HHSK['peilgebied'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] = [np.nan]\n", - "# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1']\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "\n", - "# HHSK['streefpeil'] = pd.concat([streefpeil_buffer, HHSK['streefpeil']])\n", - "# HHSK['streefpeil'] = gpd.GeoDataFrame(HHSK['streefpeil'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - 
"outputs": [], - "source": [ - "if remove_cat_2:\n", - " HHSK[\"peilgebied\"] = HHSK[\"peilgebied\"].loc[HHSK[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": {}, - "source": [ - "### Store post-processed data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "for key in HHSK.keys():\n", - " print(key)\n", - " HHSK[str(key)].to_file(f\"{output_folder}/{waterschap}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py new file mode 100644 index 0000000..23bbcdd --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py @@ -0,0 +1,244 @@ +# # HHSK + +# This script adds a new column "peilgebied_cat" and make sure the peilgebieden allign witgh the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS Notes: +# + + +import geopandas as gpd +import shapely +from general_functions import * + +remove_cat_2 = True + + +# ## HHSK + + +# define relative paths 
+waterschap = "HHSK" +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = r"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_HHSK.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}" + + +# ## Load Files + + +# Load HHNK files +HHSK = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +HHSK["peilgebied"] = HHSK["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) +gdf_buffer = gdf_buffer.to_crs("EPSG:28992") +gdf_buffer = gdf_buffer.dissolve() + + +print(len(HHSK["duikersifonhevel"].globalid.unique())) +print(len(HHSK["duikersifonhevel"].globalid)) + + +HHSK["peilgebied"].globalid.is_unique + + +len(HHSK["hydroobject"]) + + +# HHSK['hydroobject'] = HHSK['hydroobject'].explode(ignore_index=False, index_parts=True) +HHSK["hydroobject"]["geometry"] = HHSK["hydroobject"].make_valid() +HHSK["hydroobject"]["geometry"] = HHSK["hydroobject"].geometry.apply(shapely.force_2d) +HHSK["hydroobject"] = HHSK["hydroobject"][~HHSK["hydroobject"].is_empty].copy() + + +HHSK["hydroobject"] = HHSK["hydroobject"].drop_duplicates(subset="geometry", keep="first") + + +len(HHSK["hydroobject"]) + + +# ## Select waterschap boundaries and clip hws layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = 
gdf_grens.loc[["Schieland en de Krimpenerwaard"]] + +# Use waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +# ## Check Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. Filter + + +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(HHSK["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) +gdf_hws = gpd.overlay(gdf_hws, HHSK["peilgebied"], how="difference") + +# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(HHSK["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 50 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + +# gdf_hws = gdf_hws_clipped + + +# ## Create peilgebied_cat column + + +# list(HHSK['peilgebied'][HHSK['peilgebied'].code.str.contains('boezem')].code.unique()) + + +# # Add to geodataframe +# peilgebieden_cat = [] + + +# # code_list = ["dummy_id_78_dummy_id_78","PPG-48_dummy_id_196_dummy_id_196","PPG-49_dummy_id_85_dummy_id_85","PPG-237_dummy_id_148_dummy_id_148","PPG-1040_dummy_id_125_dummy_id_125"] +# # code_list = ["dummy_code_peilgebied_486","dummy_code_peilgebied_450","dummy_code_peilgebied_906","dummy_code_peilgebied_1060","dummy_code_peilgebied_552","dummy_code_peilgebied_953", +# # 
"dummy_code_peilgebied_216","dummy_code_peilgebied_544","dummy_code_peilgebied_5","dummy_code_peilgebied_480","dummy_code_peilgebied_308","dummy_code_peilgebied_677", +# # "dummy_code_peilgebied_1053"] + +# code_list = list(HHSK['peilgebied'][HHSK['peilgebied'].code.str.contains('boezem')].code.unique()) + +# for index, row in HHSK['peilgebied'].iterrows(): +# # print(row.code) +# # if row.code in code_list: +# if 'boezem' in row.code: +# print('appending_boezem') +# peilgebieden_cat.append(1) + +# else: +# peilgebieden_cat.append(0) + +# HHSK['peilgebied']['peilgebied_cat'] = peilgebieden_cat + + +HHSK["peilgebied"]["peilgebied_cat"] = 0 + + +HHSK["peilgebied"].loc[HHSK["peilgebied"].code.str.contains("GPG-399"), "peilgebied_cat"] = 1 +HHSK["peilgebied"].loc[HHSK["peilgebied"].code.str.contains("GPG-1005"), "peilgebied_cat"] = 1 +HHSK["peilgebied"].loc[HHSK["peilgebied"].code.str.contains("GPG-1360"), "peilgebied_cat"] = 1 +HHSK["peilgebied"].loc[HHSK["peilgebied"].code.str.contains("GPG-1012"), "peilgebied_cat"] = 1 + + +# ## Add nhws to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# gdf_hws['globalid'] = 'dummy_globalid_nhws_' + gdf_hws.index.astype(str) +# gdf_hws['code'] = 'dummy_code_nhws_' + gdf_hws.index.astype(str) +# gdf_hws['nen3610id'] = 'dummy_nen3610id_nhws_' + gdf_hws.index.astype(str) +# gdf_hws['peilgebied_cat'] = 2 + +# gdf_hws = gdf_hws[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# HHSK['peilgebied'] = pd.concat([gdf_hws, HHSK['peilgebied']]) + + +# # update streefpeil dict key +# streefpeil_hws = pd.DataFrame() +# streefpeil_hws['waterhoogte'] = [np.nan] * len(gdf_hws) +# streefpeil_hws['globalid'] = 'dummy_globalid_nhws_' + gdf_hws.index.astype(str) +# streefpeil_hws['geometry'] = [None]* len(gdf_hws) + +# HHSK['streefpeil'] = pd.concat([streefpeil_hws, HHSK['streefpeil']]) +# HHSK['streefpeil'] = gpd.GeoDataFrame(HHSK['streefpeil']) + + +HHSK["peilgebied"]["peilgebied_cat"].unique() + + +# ### Create 
buffer polygon between NHWS and peilgebied/RHWS + + +# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0]) +# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist())) +# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(HHSK['peilgebied'].geometry.tolist())) + +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon = buffer_polygon.set_geometry(0) +# buffer_polygon = buffer_polygon.dissolve() +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon.set_geometry('geometry') +# buffer_polygon = buffer_polygon.set_crs('EPSG:28992') + + +# ### Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) +# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + '1' +# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 +# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# HHSK['peilgebied'] = pd.concat([buffer_polygon, HHSK['peilgebied']]) +# HHSK['peilgebied'] = gpd.GeoDataFrame(HHSK['peilgebied']) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1'] +# streefpeil_buffer['geometry'] = [None] + + +# HHSK['streefpeil'] = pd.concat([streefpeil_buffer, HHSK['streefpeil']]) +# HHSK['streefpeil'] = gpd.GeoDataFrame(HHSK['streefpeil']) + + +if remove_cat_2: + HHSK["peilgebied"] = HHSK["peilgebied"].loc[HHSK["peilgebied"].peilgebied_cat != 2] + + +# ### Store post-processed data + + +for key in HHSK.keys(): + print(key) + 
HHSK[str(key)].to_file(f"{output_folder}/{waterschap}.gpkg", layer=str(key), driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.ipynb b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.ipynb deleted file mode 100644 index 226dc1f..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.ipynb +++ /dev/null @@ -1,459 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Scheldestromen" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "This script adds a new column \"peilgebied_cat\" and makes sure the peilgebieden allign with the HWS layer (Daniel):\n", - "- peilgebied_cat = 0 -> peilgebied\n", - "- peigelbied_cat = 1 -> RHWS (boezem)\n", - "- peilgebied_cat = 2 -> NHWS " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2\n", - "\n", - "remove_cat_2 = True" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "## Scheldestromen" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Scheldestromen\"\n", - "waterschap2 = \"Scheldestromen\"\n", - "\n", - "data_path = f\"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap2}.gpkg\"\n", - "\n", - "# Waterschaps boundaries\n", - "grens_path = \"/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson\"\n", - "# Hoofdwatersysteem boundaries\n", - "hws_path = \"/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg\"\n", - "# Buffer 
boundaries\n", - "buffer_path = r\"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_Scheldestromen.gpkg\"\n", - "# Output folder\n", - "output_folder = f\"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}\"" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "### Load Files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# Load HHNK files\n", - "Scheldestromen = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"hydroobject\",\n", - " \"duikersifonhevel\",\n", - " \"peilgebied\",\n", - " \"streefpeil\",\n", - " \"aggregation_area\",\n", - " ],\n", - ")\n", - "Scheldestromen[\"peilgebied\"] = Scheldestromen[\"peilgebied\"].to_crs(\"EPSG:28992\")\n", - "\n", - "# Load waterschap boundaries\n", - "gdf_grens = gpd.read_file(grens_path)\n", - "gdf_grens = gdf_grens.to_crs(\"EPSG:28992\")\n", - "gdf_grens = gdf_grens.set_index(\"waterschap\")\n", - "\n", - "# Load hws\n", - "gdf_hws = gpd.read_file(hws_path)\n", - "\n", - "# Load buffer\n", - "gdf_buffer = gpd.read_file(buffer_path)\n", - "gdf_buffer = gdf_buffer.to_crs(\"EPSG:28992\")\n", - "gdf_buffer = gdf_buffer.dissolve()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "Scheldestromen[\"peilgebied\"].globalid.is_unique" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "Scheldestromen[\"peilgebied\"]" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "## Select waterschap boundaries and clip hws layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# Select boundaries HH Amstel, Gooi en Vecht\n", - "gdf_grens = 
gdf_grens.loc[[\"Waterschap Scheldestromen\"]]\n", - "\n", - "# Use waterschap boudnaries to clip HWS layer\n", - "gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how=\"intersection\")" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "## Peilgebied and HWS layer overlap:\n", - "1. Identify the overlapping areas\n", - "2. Clip\n", - "3. Calculate overlapping area percentage\n", - "4. Filter" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# Step 1: Identify the Overlapping Areas and clip\n", - "overlaps = gpd.overlay(Scheldestromen[\"peilgebied\"], gdf_hws, how=\"intersection\", keep_geom_type=True)\n", - "\n", - "# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame\n", - "non_overlapping_peilgebied = gpd.overlay(Scheldestromen[\"peilgebied\"], overlaps, how=\"difference\", keep_geom_type=True)\n", - "overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how=\"intersection\", keep_geom_type=False)\n", - "\n", - "# Step 3: Calculate Area Percentages\n", - "# Calculate the area of overlaps\n", - "overlaps[\"overlap_area\"] = overlaps.area\n", - "\n", - "# Step 4: Filter based on area Area Percentages\n", - "minimum_area = 500\n", - "print(f\"Number of overlapping shapes without filter: {len(overlaps)}\")\n", - "overlap_ids = overlaps.loc[overlaps[\"overlap_area\"] > minimum_area]\n", - "overlap_ids = overlap_ids.globalid.to_list()\n", - "print(f\"Number of overlapping shapes with filter: {len(overlap_ids)}\")" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "## Create peilgebied_cat columnm" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# Add occurence to geodataframe\n", - "peilgebieden_cat = []\n", - "\n", - "for index, row in Scheldestromen[\"peilgebied\"].iterrows():\n", - " if row.nen3610id == 
\"dummy_nen3610id_peilgebied_549\":\n", - " print(True)\n", - " peilgebieden_cat.append(1)\n", - " elif \"GPG437\" in row.code:\n", - " print(\"yes\")\n", - " peilgebieden_cat.append(1)\n", - " elif \"dummy_code_nhws_3\" in row.code:\n", - " peilgebieden_cat.append(1)\n", - " print(\"yes2\")\n", - " else:\n", - " peilgebieden_cat.append(0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# Add new column\n", - "Scheldestromen[\"peilgebied\"][\"peilgebied_cat\"] = peilgebieden_cat" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "Scheldestromen[\"peilgebied\"][\"peilgebied_cat\"].unique()" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "## Add nhws to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# update peilgebied dict key\n", - "gdf_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"code\"] = \"dummy_code_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"nen3610id\"] = \"dummy_nen3610id_nhws_\" + gdf_hws.index.astype(str)\n", - "gdf_hws[\"peilgebied_cat\"] = 2\n", - "\n", - "gdf_hws = gdf_hws[[\"globalid\", \"code\", \"nen3610id\", \"peilgebied_cat\", \"geometry\"]]\n", - "\n", - "Scheldestromen[\"peilgebied\"] = pd.concat([gdf_hws, Scheldestromen[\"peilgebied\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "# Create boezem streefpeil layer\n", - "streefpeil_hws = pd.DataFrame()\n", - "streefpeil_hws[\"waterhoogte\"] = [np.nan] * len(gdf_hws)\n", - "streefpeil_hws[\"globalid\"] = \"dummy_globalid_nhws_\" + gdf_hws.index.astype(str)\n", - "streefpeil_hws[\"geometry\"] = [None] * len(gdf_hws)\n", - "\n", - 
"Scheldestromen[\"streefpeil\"] = pd.concat([streefpeil_hws, Scheldestromen[\"streefpeil\"]])\n", - "Scheldestromen[\"streefpeil\"] = gpd.GeoDataFrame(Scheldestromen[\"streefpeil\"])" - ] - }, - { - "cell_type": "markdown", - "id": "20", - "metadata": {}, - "source": [ - "### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create buffer polygon\n", - "# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True)\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True)\n", - "# buffer_polygon = gpd.overlay(buffer_polygon, Scheldestromen['peilgebied'], how='difference', keep_geom_type=True)" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": { - "tags": [] - }, - "source": [ - "## Add buffer to ['peilgebied','streefpeil']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "# # update peilgebied dict key\n", - "# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str)\n", - "# buffer_polygon['peilgebied_cat'] = 2\n", - "\n", - "# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']]\n", - "\n", - "# Scheldestromen['peilgebied'] = pd.concat([buffer_polygon, Scheldestromen['peilgebied']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "# # Create boezem streefpeil layer\n", - "# streefpeil_buffer = pd.DataFrame()\n", - "# streefpeil_buffer['waterhoogte'] 
= [np.nan]\n", - "# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1']\n", - "# streefpeil_buffer['geometry'] = [None]\n", - "\n", - "# Scheldestromen['streefpeil'] = pd.concat([streefpeil_buffer, Scheldestromen['streefpeil']])\n", - "# Scheldestromen['streefpeil'] = gpd.GeoDataFrame(Scheldestromen['streefpeil'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "if remove_cat_2:\n", - " Scheldestromen[\"peilgebied\"] = Scheldestromen[\"peilgebied\"].loc[Scheldestromen[\"peilgebied\"].peilgebied_cat != 2]" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": {}, - "source": [ - "## Store output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "for key in Scheldestromen.keys():\n", - " print(key)\n", - " Scheldestromen[str(key)].to_file(f\"{output_folder}/{waterschap2}.gpkg\", layer=str(key), driver=\"GPKG\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:stable]", - "language": "python", - "name": "conda-env-stable-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - 
}, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py new file mode 100644 index 0000000..55f65b7 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py @@ -0,0 +1,202 @@ +# # Scheldestromen + +# This script adds a new column "peilgebied_cat" and makes sure the peilgebieden allign with the HWS layer (Daniel): +# - peilgebied_cat = 0 -> peilgebied +# - peigelbied_cat = 1 -> RHWS (boezem) +# - peilgebied_cat = 2 -> NHWS + + +import geopandas as gpd +import numpy as np +from general_functions import * + +remove_cat_2 = True + + +# ## Scheldestromen + + +# define relative paths +waterschap = "Scheldestromen" +waterschap2 = "Scheldestromen" + +data_path = f"/DATAFOLDER/projects/4750_20/Data_postprocessed/Waterschappen/{waterschap}/{waterschap2}.gpkg" + +# Waterschaps boundaries +grens_path = "/DATAFOLDER/projects/4750_30/Data_overig/Waterschapsgrenzen/Waterschapsgrenzen.geojson" +# Hoofdwatersysteem boundaries +hws_path = "/DATAFOLDER/projects/4750_30/Data_overig/HWS/krw_basins_vlakken.gpkg" +# Buffer boundaries +buffer_path = r"/DATAFOLDER/projects/4750_30/Data_overig/HWS/hws_buffer_Scheldestromen.gpkg" +# Output folder +output_folder = f"/DATAFOLDER/projects/4750_30/Data_postprocessed/Waterschappen/{waterschap}" + + +# ### Load Files + + +# Load HHNK files +Scheldestromen = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuw", + "gemaal", + "hydroobject", + "duikersifonhevel", + "peilgebied", + "streefpeil", + "aggregation_area", + ], +) +Scheldestromen["peilgebied"] = Scheldestromen["peilgebied"].to_crs("EPSG:28992") + +# Load waterschap 
boundaries +gdf_grens = gpd.read_file(grens_path) +gdf_grens = gdf_grens.to_crs("EPSG:28992") +gdf_grens = gdf_grens.set_index("waterschap") + +# Load hws +gdf_hws = gpd.read_file(hws_path) + +# Load buffer +gdf_buffer = gpd.read_file(buffer_path) +gdf_buffer = gdf_buffer.to_crs("EPSG:28992") +gdf_buffer = gdf_buffer.dissolve() + + +Scheldestromen["peilgebied"].globalid.is_unique + + +Scheldestromen["peilgebied"] + + +# ## Select waterschap boundaries and clip hws layer + + +# Select boundaries HH Amstel, Gooi en Vecht +gdf_grens = gdf_grens.loc[["Waterschap Scheldestromen"]] + +# Use waterschap boudnaries to clip HWS layer +gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") + + +# ## Peilgebied and HWS layer overlap: +# 1. Identify the overlapping areas +# 2. Clip +# 3. Calculate overlapping area percentage +# 4. Filter + + +# Step 1: Identify the Overlapping Areas and clip +overlaps = gpd.overlay(Scheldestromen["peilgebied"], gdf_hws, how="intersection", keep_geom_type=True) + +# # Step 2: Subtract Overlapping Areas from the original polygons in each DataFrame +non_overlapping_peilgebied = gpd.overlay(Scheldestromen["peilgebied"], overlaps, how="difference", keep_geom_type=True) +overlaps = gpd.overlay(non_overlapping_peilgebied, gdf_hws, how="intersection", keep_geom_type=False) + +# Step 3: Calculate Area Percentages +# Calculate the area of overlaps +overlaps["overlap_area"] = overlaps.area + +# Step 4: Filter based on area Area Percentages +minimum_area = 500 +print(f"Number of overlapping shapes without filter: {len(overlaps)}") +overlap_ids = overlaps.loc[overlaps["overlap_area"] > minimum_area] +overlap_ids = overlap_ids.globalid.to_list() +print(f"Number of overlapping shapes with filter: {len(overlap_ids)}") + + +# ## Create peilgebied_cat columnm + + +# Add occurence to geodataframe +peilgebieden_cat = [] + +for index, row in Scheldestromen["peilgebied"].iterrows(): + if row.nen3610id == "dummy_nen3610id_peilgebied_549": + print(True) + 
peilgebieden_cat.append(1) + elif "GPG437" in row.code: + print("yes") + peilgebieden_cat.append(1) + elif "dummy_code_nhws_3" in row.code: + peilgebieden_cat.append(1) + print("yes2") + else: + peilgebieden_cat.append(0) + + +# Add new column +Scheldestromen["peilgebied"]["peilgebied_cat"] = peilgebieden_cat + + +Scheldestromen["peilgebied"]["peilgebied_cat"].unique() + + +# ## Add nhws to ['peilgebied','streefpeil'] + + +# update peilgebied dict key +gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) +gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) +gdf_hws["peilgebied_cat"] = 2 + +gdf_hws = gdf_hws[["globalid", "code", "nen3610id", "peilgebied_cat", "geometry"]] + +Scheldestromen["peilgebied"] = pd.concat([gdf_hws, Scheldestromen["peilgebied"]]) + + +# Create boezem streefpeil layer +streefpeil_hws = pd.DataFrame() +streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) +streefpeil_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) +streefpeil_hws["geometry"] = [None] * len(gdf_hws) + +Scheldestromen["streefpeil"] = pd.concat([streefpeil_hws, Scheldestromen["streefpeil"]]) +Scheldestromen["streefpeil"] = gpd.GeoDataFrame(Scheldestromen["streefpeil"]) + + +# ### Create buffer layer that ensures spatial match between peilgebied and hws layers based on the buffer layer + + +# # Create buffer polygon +# buffer_polygon = gpd.overlay(gdf_buffer, gdf_grens, how='intersection', keep_geom_type=True) +# buffer_polygon = gpd.overlay(buffer_polygon, gdf_hws, how='difference', keep_geom_type=True) +# buffer_polygon = gpd.overlay(buffer_polygon, Scheldestromen['peilgebied'], how='difference', keep_geom_type=True) + + +# ## Add buffer to ['peilgebied','streefpeil'] + + +# # update peilgebied dict key +# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['code'] = 
'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) +# buffer_polygon['peilgebied_cat'] = 2 + +# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] + +# Scheldestromen['peilgebied'] = pd.concat([buffer_polygon, Scheldestromen['peilgebied']]) + + +# # Create boezem streefpeil layer +# streefpeil_buffer = pd.DataFrame() +# streefpeil_buffer['waterhoogte'] = [np.nan] +# streefpeil_buffer['globalid'] = ['dummy_globalid_nhws_buffer_1'] +# streefpeil_buffer['geometry'] = [None] + +# Scheldestromen['streefpeil'] = pd.concat([streefpeil_buffer, Scheldestromen['streefpeil']]) +# Scheldestromen['streefpeil'] = gpd.GeoDataFrame(Scheldestromen['streefpeil']) + + +if remove_cat_2: + Scheldestromen["peilgebied"] = Scheldestromen["peilgebied"].loc[Scheldestromen["peilgebied"].peilgebied_cat != 2] + + +# ## Store output + + +for key in Scheldestromen.keys(): + print(key) + Scheldestromen[str(key)].to_file(f"{output_folder}/{waterschap2}.gpkg", layer=str(key), driver="GPKG") diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb deleted file mode 100644 index 8d72ecd..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.ipynb +++ /dev/null @@ -1,345 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "import geopandas as gpd\n", - "import numpy as np\n", - "import pandas as pd\n", - "from general_functions import show_layers_and_columns, store_data\n", - "from shapely import wkt\n", - "\n", - "from ribasim_nl import CloudStorage" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "%load_ext autoreload\n", - 
"%autoreload 2\n", - "pd.set_option(\"display.max_columns\", None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "cloud = CloudStorage()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "waterschap = \"AVG\"\n", - "waterschap_long = \"AmstelGooienVecht\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "cloud.download_aangeleverd(waterschap_long)" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "# Amstel, Gooi en Vecht" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# define paths\n", - "aangeleverd_dir = cloud.joinpath(waterschap_long, \"aangeleverd\")\n", - "verwerkt_dir = cloud.joinpath(waterschap_long, \"verwerkt\")\n", - "\n", - "output_gpkg_path = verwerkt_dir / \"preprocessed\"\n", - "\n", - "# pyogrio needs the exclamation mark to read the file from the zip\n", - "dump_path = (\n", - " aangeleverd_dir / \"aanlevering_6maart24/data dump 6 maart LHM AGV.zip!/data dump 6 maart LHM AGV/\"\n", - ").as_posix()\n", - "\n", - "verwerkt_dir.mkdir(parents=True, exist_ok=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "# AVG has delivered all data in CSV format. 
Load it in manually with some data mutations\n", - "AVG = {}\n", - "variables = [\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"afsluitmiddel\",\n", - " \"duikersifonhevel\",\n", - " \"hydroobject\",\n", - "] # , 'peilgebiedpraktijk', 'peilafwijkinggebied']\n", - "for variable in variables:\n", - " path_variable = aangeleverd_dir / \"Eerste_levering\" / (variable + \".csv\")\n", - " df_var = pd.read_csv(path_variable, delimiter=\";\")\n", - " geom_col = df_var.keys()[-1] # retrieve the column name\n", - "\n", - " if \"geometrie\" not in geom_col:\n", - " raise ValueError('No \"geometry\" string found in the last column of the dataframe. Check for existence')\n", - "\n", - " df_var[\"geometry\"] = df_var[geom_col].apply(lambda x: wkt.loads(x.split(\";\")[-1]))\n", - " AVG[variable] = df_var\n", - "\n", - "# there is one last gpkg which contains the streefpeilen (and peilgebieden)\n", - "AVG[\"peilgebied\"] = gpd.read_file(aangeleverd_dir / \"Na_levering\" / \"vigerende_peilgebieden.gpkg\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "AVG[\"peilgebied\"][\"streefpeil\"] = np.nan\n", - "AVG[\"peilgebied\"][\"streefpeil\"] = AVG[\"peilgebied\"][\"streefpeil\"].fillna(value=AVG[\"peilgebied\"][\"GPGZMRPL\"])\n", - "AVG[\"peilgebied\"][\"streefpeil\"] = AVG[\"peilgebied\"][\"streefpeil\"].fillna(value=AVG[\"peilgebied\"][\"IWS_GPGVASTP\"])\n", - "AVG[\"peilgebied\"][\"streefpeil\"] = AVG[\"peilgebied\"][\"streefpeil\"].fillna(value=AVG[\"peilgebied\"][\"IWS_GPGONDP\"])\n", - "\n", - "print(\n", - " \"Number of missing streefpeilen = \",\n", - " len(AVG[\"peilgebied\"][\"streefpeil\"].loc[AVG[\"peilgebied\"][\"streefpeil\"].isna()]),\n", - ")\n", - "\n", - "# fig, ax = plt.subplots()\n", - "# AVG['peilgebied'].geometry.plot(ax=ax, color='cornflowerblue')\n", - "# AVG['peilgebied'].loc[AVG['peilgebied']['streefpeil'].isna()].geometry.plot(ax=ax, color='red')\n", - "# ax.legend()" - ] - }, 
- { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "# Nalevering" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# overwrite previous data\n", - "AVG[\"stuw\"] = gpd.read_file(dump_path + \"/Stuw.shp\")\n", - "AVG[\"stuw\"] = AVG[\"stuw\"].loc[AVG[\"stuw\"].LHM == \"LHM\"]\n", - "\n", - "AVG[\"gemaal\"] = gpd.read_file(dump_path + \"/Gemaal.shp\")\n", - "AVG[\"gemaal\"] = AVG[\"gemaal\"].loc[AVG[\"gemaal\"].LHM == \"LHM\"]\n", - "\n", - "AVG[\"duikersifonhevel\"] = gpd.read_file(dump_path + \"/DuikerSifonHevel.shp\")\n", - "AVG[\"hydroobject\"] = gpd.read_file(dump_path + \"/LHM_hydrovakken.shp\")\n", - "AVG[\"peilgebied\"] = gpd.read_file(dump_path + \"/LHM_gebieden.shp\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "AVG[\"peilgebied\"].loc[AVG[\"peilgebied\"].zomer == 0, \"zomer\"] = np.nan\n", - "AVG[\"peilgebied\"].loc[AVG[\"peilgebied\"].winter == 0, \"winter\"] = np.nan\n", - "AVG[\"peilgebied\"].loc[AVG[\"peilgebied\"].vast == 0, \"vast\"] = np.nan" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "AVG[\"peilgebied\"][\"streefpeil\"] = AVG[\"peilgebied\"][[\"vast\", \"zomer\"]].min(axis=1, skipna=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# determine aanvoer en afvoer gemalen\n", - "AVG[\"gemaal\"][\"func_aanvoer\"], AVG[\"gemaal\"][\"func_afvoer\"], AVG[\"gemaal\"][\"func_circulatie\"] = (\n", - " False,\n", - " False,\n", - " False,\n", - ") # default is False\n", - "AVG[\"gemaal\"][\"functiegemaal\"] = AVG[\"gemaal\"][\"naam\"].astype(\n", - " str\n", - ") # aan AGV gevraagd of alle opmaling informatie in de kolom 'naam' verwerkt is. 
Antwoord: ja\n", - "\n", - "AVG[\"gemaal\"].loc[AVG[\"gemaal\"][\"functiegemaal\"] == \"onbekend\", \"functiegemaal\"] = (\n", - " np.nan\n", - ") # replace onbekend with nan, will be filled up later see one line below\n", - "\n", - "AVG[\"gemaal\"].loc[AVG[\"gemaal\"].functiegemaal.str.contains(\"fvoer|nderbemaling|f-|oodpomp\"), \"func_afvoer\"] = True\n", - "AVG[\"gemaal\"].loc[AVG[\"gemaal\"].functiegemaal.str.contains(\"anvoergemaal|pmaling|an-|p-|pvoer\"), \"func_aanvoer\"] = True\n", - "AVG[\"gemaal\"].loc[AVG[\"gemaal\"].functiegemaal.str.contains(\"irculatie\"), \"func_circulatie\"] = True\n", - "AVG[\"gemaal\"].loc[\n", - " ~AVG[\"gemaal\"].func_afvoer & ~AVG[\"gemaal\"].func_aanvoer & ~AVG[\"gemaal\"].func_circulatie,\n", - " \"func_afvoer\",\n", - "] = True # set to afvoergemaal is there the function is unknown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# stuw\n", - "AVG[\"stuw\"] = AVG[\"stuw\"][[\"code\", \"geometry\"]]\n", - "AVG[\"stuw\"].loc[:, \"nen3610id\"] = \"dummy_nen3610id_stuw_\" + AVG[\"stuw\"].index.astype(str)\n", - "AVG[\"stuw\"][\"globalid\"] = \"dummy_globalid_stuw_\" + AVG[\"stuw\"].index.astype(str)\n", - "AVG[\"stuw\"] = gpd.GeoDataFrame(AVG[\"stuw\"]).to_crs(\"epsg:28992\")\n", - "\n", - "# gemaal\n", - "AVG[\"gemaal\"] = AVG[\"gemaal\"][[\"code\", \"func_afvoer\", \"func_aanvoer\", \"func_circulatie\", \"geometry\"]]\n", - "AVG[\"gemaal\"].loc[:, \"nen3610id\"] = \"dummy_nen3610id_gemaal_\" + AVG[\"gemaal\"].index.astype(str)\n", - "AVG[\"gemaal\"][\"globalid\"] = \"dummy_globalid_gemaal_\" + AVG[\"gemaal\"].index.astype(str)\n", - "AVG[\"gemaal\"] = gpd.GeoDataFrame(AVG[\"gemaal\"]).to_crs(\"epsg:28992\")\n", - "\n", - "# afsluitmiddel\n", - "AVG[\"afsluitmiddel\"] = AVG[\"afsluitmiddel\"][[\"code\", \"geometry\"]].copy()\n", - "AVG[\"afsluitmiddel\"].loc[:, \"nen3610id\"] = \"dummy_nen3610id_afsluitmiddel_\" + 
AVG[\"afsluitmiddel\"].index.astype(str)\n", - "AVG[\"afsluitmiddel\"][\"globalid\"] = \"dummy_globalid_afsluitmiddel_\" + AVG[\"afsluitmiddel\"].index.astype(str)\n", - "AVG[\"afsluitmiddel\"] = gpd.GeoDataFrame(AVG[\"afsluitmiddel\"]).set_crs(\"epsg:28992\")\n", - "\n", - "# duikersifonhevel\n", - "AVG[\"duikersifonhevel\"] = AVG[\"duikersifonhevel\"][[\"code\", \"geometry\"]]\n", - "AVG[\"duikersifonhevel\"].loc[:, \"nen3610id\"] = \"dummy_nen3610id_duikersifonhevel_\" + AVG[\n", - " \"duikersifonhevel\"\n", - "].index.astype(str)\n", - "AVG[\"duikersifonhevel\"][\"globalid\"] = \"dummy_globalid_duikersifonhevel_\" + AVG[\"duikersifonhevel\"].index.astype(str)\n", - "AVG[\"duikersifonhevel\"] = gpd.GeoDataFrame(AVG[\"duikersifonhevel\"]).to_crs(\"epsg:28992\")\n", - "\n", - "# hydroobject\n", - "AVG[\"hydroobject\"] = AVG[\"hydroobject\"][[\"geometry\"]]\n", - "AVG[\"hydroobject\"][\"code\"] = \"dummy_code_hydroobject_\" + AVG[\"hydroobject\"].index.astype(str)\n", - "AVG[\"hydroobject\"][\"nen3610id\"] = \"dummy_nen3610id_hydroobject_\" + AVG[\"hydroobject\"].index.astype(str)\n", - "AVG[\"hydroobject\"][\"globalid\"] = \"dummy_globalid_hydroobject_\" + AVG[\"hydroobject\"].index.astype(str)\n", - "AVG[\"hydroobject\"] = gpd.GeoDataFrame(AVG[\"hydroobject\"]).set_crs(\"epsg:28992\")\n", - "\n", - "# streefpeil\n", - "AVG[\"streefpeil\"] = pd.DataFrame()\n", - "AVG[\"streefpeil\"][\"waterhoogte\"] = AVG[\"peilgebied\"][\"streefpeil\"]\n", - "AVG[\"streefpeil\"][\"globalid\"] = \"dummy_globalid_streefpeil_\" + AVG[\"streefpeil\"].index.astype(str)\n", - "AVG[\"streefpeil\"][\"geometry\"] = np.nan\n", - "AVG[\"streefpeil\"] = gpd.GeoDataFrame(AVG[\"streefpeil\"]).set_crs(\"epsg:28992\")\n", - "\n", - "# peilgebied\n", - "AVG[\"peilgebied\"][\"code\"] = AVG[\"peilgebied\"][\"GAFNAAM\"]\n", - "AVG[\"peilgebied\"][\"geometry\"] = AVG[\"peilgebied\"][\"geometry\"]\n", - "AVG[\"peilgebied\"][\"nen3610id\"] = \"dummy_nen3610id_peilgebied_\" + 
AVG[\"peilgebied\"].index.astype(str)\n", - "AVG[\"peilgebied\"][\"globalid\"] = \"dummy_globalid_peilgebied_\" + AVG[\"peilgebied\"].index.astype(str)\n", - "\n", - "AVG[\"peilgebied\"] = AVG[\"peilgebied\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "AVG[\"peilgebied\"] = gpd.GeoDataFrame(AVG[\"peilgebied\"]).to_crs(\"epsg:28992\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# add duikersifonhevels to the hydroobjecten\n", - "AVG[\"hydroobject\"] = pd.concat([AVG[\"hydroobject\"], AVG[\"duikersifonhevel\"]])\n", - "AVG[\"hydroobject\"] = AVG[\"hydroobject\"].drop_duplicates(subset=\"globalid\") # in case it is run multiple times\n", - "AVG[\"hydroobject\"] = gpd.GeoDataFrame(AVG[\"hydroobject\"]).set_crs(\"epsg:28992\")" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "# Control, store" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=AVG)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "store_data(waterschap=AVG, output_gpkg_path=str(output_gpkg_path))\n", - "cloud.upload_verwerkt(waterschap_long)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "default", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.7" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py new file mode 100644 index 
0000000..482d593 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py @@ -0,0 +1,188 @@ +import geopandas as gpd +import numpy as np +import pandas as pd +from general_functions import show_layers_and_columns, store_data +from shapely import wkt + +from ribasim_nl import CloudStorage + +pd.set_option("display.max_columns", None) + + +cloud = CloudStorage() + + +waterschap = "AVG" +waterschap_long = "AmstelGooienVecht" + + +cloud.download_aangeleverd(waterschap_long) + + +# # Amstel, Gooi en Vecht + + +# define paths +aangeleverd_dir = cloud.joinpath(waterschap_long, "aangeleverd") +verwerkt_dir = cloud.joinpath(waterschap_long, "verwerkt") + +output_gpkg_path = verwerkt_dir / "preprocessed" + +# pyogrio needs the exclamation mark to read the file from the zip +dump_path = ( + aangeleverd_dir / "aanlevering_6maart24/data dump 6 maart LHM AGV.zip!/data dump 6 maart LHM AGV/" +).as_posix() + +verwerkt_dir.mkdir(parents=True, exist_ok=True) + + +# AVG has delivered all data in CSV format. Load it in manually with some data mutations +AVG = {} +variables = [ + "stuw", + "gemaal", + "afsluitmiddel", + "duikersifonhevel", + "hydroobject", +] # , 'peilgebiedpraktijk', 'peilafwijkinggebied'] +for variable in variables: + path_variable = aangeleverd_dir / "Eerste_levering" / (variable + ".csv") + df_var = pd.read_csv(path_variable, delimiter=";") + geom_col = df_var.keys()[-1] # retrieve the column name + + if "geometrie" not in geom_col: + raise ValueError('No "geometry" string found in the last column of the dataframe. 
Check for existence') + + df_var["geometry"] = df_var[geom_col].apply(lambda x: wkt.loads(x.split(";")[-1])) + AVG[variable] = df_var + +# there is one last gpkg which contains the streefpeilen (and peilgebieden) +AVG["peilgebied"] = gpd.read_file(aangeleverd_dir / "Na_levering" / "vigerende_peilgebieden.gpkg") + + +AVG["peilgebied"]["streefpeil"] = np.nan +AVG["peilgebied"]["streefpeil"] = AVG["peilgebied"]["streefpeil"].fillna(value=AVG["peilgebied"]["GPGZMRPL"]) +AVG["peilgebied"]["streefpeil"] = AVG["peilgebied"]["streefpeil"].fillna(value=AVG["peilgebied"]["IWS_GPGVASTP"]) +AVG["peilgebied"]["streefpeil"] = AVG["peilgebied"]["streefpeil"].fillna(value=AVG["peilgebied"]["IWS_GPGONDP"]) + +print( + "Number of missing streefpeilen = ", + len(AVG["peilgebied"]["streefpeil"].loc[AVG["peilgebied"]["streefpeil"].isna()]), +) + +# fig, ax = plt.subplots() +# AVG['peilgebied'].geometry.plot(ax=ax, color='cornflowerblue') +# AVG['peilgebied'].loc[AVG['peilgebied']['streefpeil'].isna()].geometry.plot(ax=ax, color='red') +# ax.legend() + + +# # Nalevering + + +# overwrite previous data +AVG["stuw"] = gpd.read_file(dump_path + "/Stuw.shp") +AVG["stuw"] = AVG["stuw"].loc[AVG["stuw"].LHM == "LHM"] + +AVG["gemaal"] = gpd.read_file(dump_path + "/Gemaal.shp") +AVG["gemaal"] = AVG["gemaal"].loc[AVG["gemaal"].LHM == "LHM"] + +AVG["duikersifonhevel"] = gpd.read_file(dump_path + "/DuikerSifonHevel.shp") +AVG["hydroobject"] = gpd.read_file(dump_path + "/LHM_hydrovakken.shp") +AVG["peilgebied"] = gpd.read_file(dump_path + "/LHM_gebieden.shp") + + +AVG["peilgebied"].loc[AVG["peilgebied"].zomer == 0, "zomer"] = np.nan +AVG["peilgebied"].loc[AVG["peilgebied"].winter == 0, "winter"] = np.nan +AVG["peilgebied"].loc[AVG["peilgebied"].vast == 0, "vast"] = np.nan + + +AVG["peilgebied"]["streefpeil"] = AVG["peilgebied"][["vast", "zomer"]].min(axis=1, skipna=True) + + +# determine aanvoer en afvoer gemalen +AVG["gemaal"]["func_aanvoer"], AVG["gemaal"]["func_afvoer"], 
AVG["gemaal"]["func_circulatie"] = ( + False, + False, + False, +) # default is False +AVG["gemaal"]["functiegemaal"] = AVG["gemaal"]["naam"].astype( + str +) # aan AGV gevraagd of alle opmaling informatie in de kolom 'naam' verwerkt is. Antwoord: ja + +AVG["gemaal"].loc[AVG["gemaal"]["functiegemaal"] == "onbekend", "functiegemaal"] = ( + np.nan +) # replace onbekend with nan, will be filled up later see one line below + +AVG["gemaal"].loc[AVG["gemaal"].functiegemaal.str.contains("fvoer|nderbemaling|f-|oodpomp"), "func_afvoer"] = True +AVG["gemaal"].loc[AVG["gemaal"].functiegemaal.str.contains("anvoergemaal|pmaling|an-|p-|pvoer"), "func_aanvoer"] = True +AVG["gemaal"].loc[AVG["gemaal"].functiegemaal.str.contains("irculatie"), "func_circulatie"] = True +AVG["gemaal"].loc[ + ~AVG["gemaal"].func_afvoer & ~AVG["gemaal"].func_aanvoer & ~AVG["gemaal"].func_circulatie, + "func_afvoer", +] = True # set to afvoergemaal is there the function is unknown + + +# stuw +AVG["stuw"] = AVG["stuw"][["code", "geometry"]] +AVG["stuw"].loc[:, "nen3610id"] = "dummy_nen3610id_stuw_" + AVG["stuw"].index.astype(str) +AVG["stuw"]["globalid"] = "dummy_globalid_stuw_" + AVG["stuw"].index.astype(str) +AVG["stuw"] = gpd.GeoDataFrame(AVG["stuw"]).to_crs("epsg:28992") + +# gemaal +AVG["gemaal"] = AVG["gemaal"][["code", "func_afvoer", "func_aanvoer", "func_circulatie", "geometry"]] +AVG["gemaal"].loc[:, "nen3610id"] = "dummy_nen3610id_gemaal_" + AVG["gemaal"].index.astype(str) +AVG["gemaal"]["globalid"] = "dummy_globalid_gemaal_" + AVG["gemaal"].index.astype(str) +AVG["gemaal"] = gpd.GeoDataFrame(AVG["gemaal"]).to_crs("epsg:28992") + +# afsluitmiddel +AVG["afsluitmiddel"] = AVG["afsluitmiddel"][["code", "geometry"]].copy() +AVG["afsluitmiddel"].loc[:, "nen3610id"] = "dummy_nen3610id_afsluitmiddel_" + AVG["afsluitmiddel"].index.astype(str) +AVG["afsluitmiddel"]["globalid"] = "dummy_globalid_afsluitmiddel_" + AVG["afsluitmiddel"].index.astype(str) +AVG["afsluitmiddel"] = 
gpd.GeoDataFrame(AVG["afsluitmiddel"]).set_crs("epsg:28992") + +# duikersifonhevel +AVG["duikersifonhevel"] = AVG["duikersifonhevel"][["code", "geometry"]] +AVG["duikersifonhevel"].loc[:, "nen3610id"] = "dummy_nen3610id_duikersifonhevel_" + AVG[ + "duikersifonhevel" +].index.astype(str) +AVG["duikersifonhevel"]["globalid"] = "dummy_globalid_duikersifonhevel_" + AVG["duikersifonhevel"].index.astype(str) +AVG["duikersifonhevel"] = gpd.GeoDataFrame(AVG["duikersifonhevel"]).to_crs("epsg:28992") + +# hydroobject +AVG["hydroobject"] = AVG["hydroobject"][["geometry"]] +AVG["hydroobject"]["code"] = "dummy_code_hydroobject_" + AVG["hydroobject"].index.astype(str) +AVG["hydroobject"]["nen3610id"] = "dummy_nen3610id_hydroobject_" + AVG["hydroobject"].index.astype(str) +AVG["hydroobject"]["globalid"] = "dummy_globalid_hydroobject_" + AVG["hydroobject"].index.astype(str) +AVG["hydroobject"] = gpd.GeoDataFrame(AVG["hydroobject"]).set_crs("epsg:28992") + +# streefpeil +AVG["streefpeil"] = pd.DataFrame() +AVG["streefpeil"]["waterhoogte"] = AVG["peilgebied"]["streefpeil"] +AVG["streefpeil"]["globalid"] = "dummy_globalid_streefpeil_" + AVG["streefpeil"].index.astype(str) +AVG["streefpeil"]["geometry"] = np.nan +AVG["streefpeil"] = gpd.GeoDataFrame(AVG["streefpeil"]).set_crs("epsg:28992") + +# peilgebied +AVG["peilgebied"]["code"] = AVG["peilgebied"]["GAFNAAM"] +AVG["peilgebied"]["geometry"] = AVG["peilgebied"]["geometry"] +AVG["peilgebied"]["nen3610id"] = "dummy_nen3610id_peilgebied_" + AVG["peilgebied"].index.astype(str) +AVG["peilgebied"]["globalid"] = "dummy_globalid_peilgebied_" + AVG["peilgebied"].index.astype(str) + +AVG["peilgebied"] = AVG["peilgebied"][["code", "nen3610id", "globalid", "geometry"]] +AVG["peilgebied"] = gpd.GeoDataFrame(AVG["peilgebied"]).to_crs("epsg:28992") + + +# add duikersifonhevels to the hydroobjecten +AVG["hydroobject"] = pd.concat([AVG["hydroobject"], AVG["duikersifonhevel"]]) +AVG["hydroobject"] = 
AVG["hydroobject"].drop_duplicates(subset="globalid") # in case it is run multiple times +AVG["hydroobject"] = gpd.GeoDataFrame(AVG["hydroobject"]).set_crs("epsg:28992") + + +# # Control, store + + +show_layers_and_columns(waterschap=AVG) + + +store_data(waterschap=AVG, output_gpkg_path=str(output_gpkg_path)) +cloud.upload_verwerkt(waterschap_long) diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.ipynb deleted file mode 100644 index 19ff4db..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.ipynb +++ /dev/null @@ -1,275 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import os\n", - "\n", - "import geopandas as gpd\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "from general_functions import *" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "# Delfland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Delfland\"\n", - "gdb_path = \"../../Data_preprocessed/Waterschappen/Delfland/Watersysteem.gdb\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/Delfland\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "Delfland = read_gpkg_layers(\n", - " gpkg_path=gdb_path, variables=[\"stuw\", \"gemaal\", \"watergang\", \"duikersifonhevel\", \"peilgebiedpraktijk\", \"keerschot\"]\n", - ")\n", - "# 'peilafwijkinggebied',\n", - "# 'pomp'])\n", - "# 'streefpeil'])\n", - "# change names\n", - "Delfland[\"hydroobject\"] = 
Delfland.pop(\"watergang\")" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "### Adjust column names" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# discard irrelevant data of Delfland Delfland, and create a uniform dataset compared to the other waterschappen\n", - "# Stuw\n", - "Delfland[\"stuw\"] = Delfland[\"stuw\"][[\"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "Delfland[\"stuw\"] = Delfland[\"stuw\"].rename(columns={\"CODE\": \"code\", \"GLOBALID\": \"globalid\"})\n", - "Delfland[\"stuw\"][\"nen3610id\"] = \"dummy_nen3610id_stuw_\" + Delfland[\"stuw\"].index.astype(str)\n", - "\n", - "# Gemaal\n", - "\n", - "# determine aanvoer en afvoer gemalen\n", - "Delfland[\"gemaal\"][\"func_aanvoer\"], Delfland[\"gemaal\"][\"func_afvoer\"], Delfland[\"gemaal\"][\"func_circulatie\"] = (\n", - " False,\n", - " False,\n", - " False,\n", - ") # default is False\n", - "Delfland[\"gemaal\"].FUNCTIEGEMAAL_resolved.fillna(\n", - " Delfland[\"gemaal\"].WS_SOORTGEMAAL\n", - ") # sometimes recircualtie is located in another column, but sometimes they are different. 
Only fill in for NaN\n", - "Delfland[\"gemaal\"][\"FUNCTIEGEMAAL_resolved\"] = Delfland[\"gemaal\"][\"FUNCTIEGEMAAL_resolved\"].astype(str)\n", - "\n", - "Delfland[\"gemaal\"].loc[\n", - " Delfland[\"gemaal\"].FUNCTIEGEMAAL_resolved.str.contains(\"Onbekend|Onderbemaling|Afvoergemaal|Af-\"), \"func_afvoer\"\n", - "] = True\n", - "Delfland[\"gemaal\"].loc[Delfland[\"gemaal\"].FUNCTIEGEMAAL_resolved.str.contains(\"Opmaling|Aanvoer\"), \"func_aanvoer\"] = (\n", - " True\n", - ")\n", - "Delfland[\"gemaal\"].loc[\n", - " Delfland[\"gemaal\"].FUNCTIEGEMAAL_resolved.str.contains(\"Overig|circulatie\"), \"func_circulatie\"\n", - "] = True\n", - "Delfland[\"gemaal\"].loc[\n", - " (Delfland[\"gemaal\"].func_afvoer is False)\n", - " & (Delfland[\"gemaal\"].func_aanvoer is False)\n", - " & (Delfland[\"gemaal\"].func_circulatie is False),\n", - " \"func_afvoer\",\n", - "] = True # set to afvoergemaal is there the function is unknown\n", - "\n", - "Delfland[\"gemaal\"] = Delfland[\"gemaal\"][[\"GLOBALID\", \"func_afvoer\", \"func_aanvoer\", \"func_circulatie\", \"geometry\"]]\n", - "Delfland[\"gemaal\"] = Delfland[\"gemaal\"].rename(columns={\"GLOBALID\": \"globalid\"})\n", - "Delfland[\"gemaal\"][\"code\"] = \"dummy_code_gemaal_\" + Delfland[\"gemaal\"].index.astype(str)\n", - "Delfland[\"gemaal\"][\"nen3610id\"] = \"dummy_nen3610id_gemaal_\" + Delfland[\"gemaal\"].index.astype(str)\n", - "\n", - "# Hydroobject\n", - "Delfland[\"hydroobject\"] = Delfland[\"hydroobject\"][[\"GLOBALID\", \"geometry\"]]\n", - "Delfland[\"hydroobject\"] = Delfland[\"hydroobject\"].rename(columns={\"GLOBALID\": \"globalid\"})\n", - "Delfland[\"hydroobject\"][\"code\"] = \"dummy_code_hydroobject_\" + Delfland[\"hydroobject\"].index.astype(str)\n", - "Delfland[\"hydroobject\"][\"nen3610id\"] = \"dummy_nen3610id_hydroobject_\" + Delfland[\"hydroobject\"].index.astype(str)\n", - "\n", - "# Keerschot\n", - "Delfland[\"keerschot\"] = Delfland[\"keerschot\"][[\"GLOBALID\", \"geometry\"]]\n", - 
"Delfland[\"keerschot\"] = Delfland[\"keerschot\"].rename(columns={\"GLOBALID\": \"globalid\"})\n", - "Delfland[\"keerschot\"][\"code\"] = \"dummy_code_keerschot_\" + Delfland[\"keerschot\"].index.astype(str)\n", - "Delfland[\"keerschot\"][\"nen3610id\"] = \"dummy_nen3610id_keerschot_\" + Delfland[\"keerschot\"].index.astype(str)\n", - "\n", - "# duikersifonhevel\n", - "Delfland[\"duikersifonhevel\"] = Delfland[\"duikersifonhevel\"][[\"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "Delfland[\"duikersifonhevel\"] = Delfland[\"duikersifonhevel\"].rename(columns={\"CODE\": \"code\", \"GLOBALID\": \"globalid\"})\n", - "Delfland[\"duikersifonhevel\"][\"code\"] = \"dummy_code_duikersifonhevel_\" + Delfland[\"duikersifonhevel\"].index.astype(str)\n", - "Delfland[\"duikersifonhevel\"][\"nen3610id\"] = \"dummy_nen3610id_duikersifonhevel_\" + Delfland[\n", - " \"duikersifonhevel\"\n", - "].index.astype(str)\n", - "\n", - "# afsluitmiddel\n", - "# niet geleverd\n", - "\n", - "# Peilgebiedpraktijk\n", - "Delfland[\"peilgebiedpraktijk\"] = Delfland[\"peilgebiedpraktijk\"][[\"WS_HOOGPEIL\", \"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "Delfland[\"peilgebiedpraktijk\"][\"nen3610id\"] = \"dummy_nen3610id_peilgebiedpraktijk_\" + Delfland[\n", - " \"peilgebiedpraktijk\"\n", - "].index.astype(str)\n", - "Delfland[\"peilgebiedpraktijk\"] = Delfland[\"peilgebiedpraktijk\"].rename(\n", - " columns={\"WS_HOOGPEIL\": \"streefpeil\", \"CODE\": \"code\", \"GLOBALID\": \"globalid\"}\n", - ")\n", - "\n", - "# Streefpeil\n", - "Delfland[\"streefpeil\"] = pd.DataFrame()\n", - "Delfland[\"streefpeil\"][\"waterhoogte\"] = Delfland[\"peilgebiedpraktijk\"][\"streefpeil\"]\n", - "Delfland[\"streefpeil\"][\"globalid\"] = Delfland[\"peilgebiedpraktijk\"][\"globalid\"]\n", - "Delfland[\"streefpeil\"][\"geometry\"] = None\n", - "Delfland[\"streefpeil\"] = gpd.GeoDataFrame(Delfland[\"streefpeil\"], geometry=\"geometry\")\n", - "\n", - "Delfland[\"peilgebied\"] = Delfland[\"peilgebiedpraktijk\"]" - ] - }, - 
{ - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "### Add column to determine the HWS_BZM" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "Delfland[\"peilgebied\"][\"HWS_BZM\"] = False\n", - "Delfland[\"peilgebied\"].loc[Delfland[\"peilgebied\"].code == \"BZM 1\", \"HWS_BZM\"] = True # looked up manually" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# delete irrelvant data\n", - "variables = [\"peilgebiedpraktijk\"]\n", - "\n", - "for variable in variables:\n", - " if str(variable) in Delfland:\n", - " del Delfland[variable]" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=Delfland)" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# Check if the directory exists\n", - "if not os.path.exists(output_gpkg_path):\n", - " # If it doesn't exist, create it\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=Delfland, output_gpkg_path=output_gpkg_path + \"/Delfland\")" - ] - }, - { - "cell_type": "raw", - "id": "14", - "metadata": {}, - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - "name": "routing" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - 
"name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py new file mode 100644 index 0000000..653f493 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py @@ -0,0 +1,141 @@ +# import packages and functions +import os + +import geopandas as gpd +import pandas as pd +from general_functions import * + +# # Delfland + + +# define relative paths +waterschap = "Delfland" +gdb_path = "../../Data_preprocessed/Waterschappen/Delfland/Watersysteem.gdb" +output_gpkg_path = "../../Data_postprocessed/Waterschappen/Delfland" + + +Delfland = read_gpkg_layers( + gpkg_path=gdb_path, variables=["stuw", "gemaal", "watergang", "duikersifonhevel", "peilgebiedpraktijk", "keerschot"] +) +# 'peilafwijkinggebied', +# 'pomp']) +# 'streefpeil']) +# change names +Delfland["hydroobject"] = Delfland.pop("watergang") + + +# ### Adjust column names + + +# discard irrelevant data of Delfland Delfland, and create a uniform dataset compared to the other waterschappen +# Stuw +Delfland["stuw"] = Delfland["stuw"][["CODE", "GLOBALID", "geometry"]] +Delfland["stuw"] = Delfland["stuw"].rename(columns={"CODE": "code", "GLOBALID": "globalid"}) +Delfland["stuw"]["nen3610id"] = "dummy_nen3610id_stuw_" + Delfland["stuw"].index.astype(str) + +# Gemaal + +# determine aanvoer en afvoer gemalen +Delfland["gemaal"]["func_aanvoer"], Delfland["gemaal"]["func_afvoer"], Delfland["gemaal"]["func_circulatie"] = ( + False, + False, + False, +) # default is False +Delfland["gemaal"].FUNCTIEGEMAAL_resolved.fillna( + Delfland["gemaal"].WS_SOORTGEMAAL +) # sometimes recircualtie is located in another column, but sometimes they are different. 
Only fill in for NaN +Delfland["gemaal"]["FUNCTIEGEMAAL_resolved"] = Delfland["gemaal"]["FUNCTIEGEMAAL_resolved"].astype(str) + +Delfland["gemaal"].loc[ + Delfland["gemaal"].FUNCTIEGEMAAL_resolved.str.contains("Onbekend|Onderbemaling|Afvoergemaal|Af-"), "func_afvoer" +] = True +Delfland["gemaal"].loc[Delfland["gemaal"].FUNCTIEGEMAAL_resolved.str.contains("Opmaling|Aanvoer"), "func_aanvoer"] = ( + True +) +Delfland["gemaal"].loc[ + Delfland["gemaal"].FUNCTIEGEMAAL_resolved.str.contains("Overig|circulatie"), "func_circulatie" +] = True +Delfland["gemaal"].loc[ + (Delfland["gemaal"].func_afvoer is False) + & (Delfland["gemaal"].func_aanvoer is False) + & (Delfland["gemaal"].func_circulatie is False), + "func_afvoer", +] = True # set to afvoergemaal is there the function is unknown + +Delfland["gemaal"] = Delfland["gemaal"][["GLOBALID", "func_afvoer", "func_aanvoer", "func_circulatie", "geometry"]] +Delfland["gemaal"] = Delfland["gemaal"].rename(columns={"GLOBALID": "globalid"}) +Delfland["gemaal"]["code"] = "dummy_code_gemaal_" + Delfland["gemaal"].index.astype(str) +Delfland["gemaal"]["nen3610id"] = "dummy_nen3610id_gemaal_" + Delfland["gemaal"].index.astype(str) + +# Hydroobject +Delfland["hydroobject"] = Delfland["hydroobject"][["GLOBALID", "geometry"]] +Delfland["hydroobject"] = Delfland["hydroobject"].rename(columns={"GLOBALID": "globalid"}) +Delfland["hydroobject"]["code"] = "dummy_code_hydroobject_" + Delfland["hydroobject"].index.astype(str) +Delfland["hydroobject"]["nen3610id"] = "dummy_nen3610id_hydroobject_" + Delfland["hydroobject"].index.astype(str) + +# Keerschot +Delfland["keerschot"] = Delfland["keerschot"][["GLOBALID", "geometry"]] +Delfland["keerschot"] = Delfland["keerschot"].rename(columns={"GLOBALID": "globalid"}) +Delfland["keerschot"]["code"] = "dummy_code_keerschot_" + Delfland["keerschot"].index.astype(str) +Delfland["keerschot"]["nen3610id"] = "dummy_nen3610id_keerschot_" + Delfland["keerschot"].index.astype(str) + +# duikersifonhevel 
+Delfland["duikersifonhevel"] = Delfland["duikersifonhevel"][["CODE", "GLOBALID", "geometry"]] +Delfland["duikersifonhevel"] = Delfland["duikersifonhevel"].rename(columns={"CODE": "code", "GLOBALID": "globalid"}) +Delfland["duikersifonhevel"]["code"] = "dummy_code_duikersifonhevel_" + Delfland["duikersifonhevel"].index.astype(str) +Delfland["duikersifonhevel"]["nen3610id"] = "dummy_nen3610id_duikersifonhevel_" + Delfland[ + "duikersifonhevel" +].index.astype(str) + +# afsluitmiddel +# niet geleverd + +# Peilgebiedpraktijk +Delfland["peilgebiedpraktijk"] = Delfland["peilgebiedpraktijk"][["WS_HOOGPEIL", "CODE", "GLOBALID", "geometry"]] +Delfland["peilgebiedpraktijk"]["nen3610id"] = "dummy_nen3610id_peilgebiedpraktijk_" + Delfland[ + "peilgebiedpraktijk" +].index.astype(str) +Delfland["peilgebiedpraktijk"] = Delfland["peilgebiedpraktijk"].rename( + columns={"WS_HOOGPEIL": "streefpeil", "CODE": "code", "GLOBALID": "globalid"} +) + +# Streefpeil +Delfland["streefpeil"] = pd.DataFrame() +Delfland["streefpeil"]["waterhoogte"] = Delfland["peilgebiedpraktijk"]["streefpeil"] +Delfland["streefpeil"]["globalid"] = Delfland["peilgebiedpraktijk"]["globalid"] +Delfland["streefpeil"]["geometry"] = None +Delfland["streefpeil"] = gpd.GeoDataFrame(Delfland["streefpeil"], geometry="geometry") + +Delfland["peilgebied"] = Delfland["peilgebiedpraktijk"] + + +# ### Add column to determine the HWS_BZM + + +Delfland["peilgebied"]["HWS_BZM"] = False +Delfland["peilgebied"].loc[Delfland["peilgebied"].code == "BZM 1", "HWS_BZM"] = True # looked up manually + + +# delete irrelvant data +variables = ["peilgebiedpraktijk"] + +for variable in variables: + if str(variable) in Delfland: + del Delfland[variable] + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=Delfland) + + +# ### Store data + + +# Check if the directory exists +if not os.path.exists(output_gpkg_path): + # If it doesn't exist, create it + os.makedirs(output_gpkg_path) + 
+store_data(waterschap=Delfland, output_gpkg_path=output_gpkg_path + "/Delfland") diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.ipynb deleted file mode 100644 index c3cf825..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.ipynb +++ /dev/null @@ -1,374 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import geopandas as gpd\n", - "import numpy as np\n", - "import pandas as pd\n", - "from general_functions import *\n", - "\n", - "%load_ext autoreload\n", - "%autoreload 2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "# Hollands Noorderkwartier" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "gpkg_path_HHNK = \"../../Data_preprocessed/Waterschappen/HHNK/Noorderkwartier.gpkg\"\n", - "gdb_path_HHNK = \"../../Data_preprocessed/Waterschappen/HHNK/Watersysteemanalyse_BWN2.gdb\"\n", - "gdb_path_HHNK_nalevering = \"../../Data_preprocessed/Waterschappen/HHNK/Na_levering_HHNK_gemalen_stuwen_20240321.gdb\"\n", - "output_gpkg_path_HHNK = \"../../Data_postprocessed/Waterschappen/HHNK/Noorderkwartier\"\n", - "interim_results = \"../../Interim_results/Waterschappen/HHNK/Noorderkwartier_IR\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# retrieve the data\n", - "HHNK = read_gpkg_layers(\n", - " gpkg_path=gpkg_path_HHNK,\n", - " variables=[\n", - " # 'stuw', #nalevering\n", - " # 'gemaal', #nalevering\n", - " \"afsluitmiddel\",\n", - " \"hydroobject\",\n", - " 
\"duikersifonhevel\",\n", - " ],\n", - ")\n", - "# 'peilafwijkinggebied',\n", - "# 'peilgebiedpraktijk',\n", - "# 'pomp'])\n", - "# 'streefpeil'])\n", - "\n", - "# retrieve data from a gdb, as the gpkg of HHNK does not contain all relevant data\n", - "data_gdb = gpd.read_file(gdb_path_HHNK, layer=\"BWN_ruimtekaart\")\n", - "HHNK_nalevering = read_gpkg_layers(gpkg_path=gdb_path_HHNK_nalevering, variables=[\"stuw\", \"gemaal\"]) # nalevering\n", - "\n", - "HHNK[\"stuw\"] = HHNK_nalevering[\"stuw\"]\n", - "HHNK[\"gemaal\"] = HHNK_nalevering[\"gemaal\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# determine aanvoer en afvoer gemalen\n", - "HHNK[\"gemaal\"][\"func_aanvoer\"], HHNK[\"gemaal\"][\"func_afvoer\"], HHNK[\"gemaal\"][\"func_circulatie\"] = (\n", - " False,\n", - " False,\n", - " False,\n", - ") # default is False\n", - "HHNK[\"gemaal\"][\"functiegemaal\"] = HHNK[\"gemaal\"][\"FUNCTIEGEMAAL\"].astype(str)\n", - "HHNK[\"gemaal\"].loc[HHNK[\"gemaal\"][\"functiegemaal\"] == \"onbekend\", \"functiegemaal\"] = (\n", - " np.nan\n", - ") # replace onbekend with nan, will be filled up later see one line below\n", - "HHNK[\"gemaal\"].loc[HHNK[\"gemaal\"][\"functiegemaal\"] == \"99\", \"functiegemaal\"] = (\n", - " np.nan\n", - ") # replace onbekend with nan, will be filled up later see one line below\n", - "HHNK[\"gemaal\"][\"functiegemaal\"].fillna(\n", - " HHNK[\"gemaal\"][\"OPMERKING\"], inplace=True\n", - ") # some additional is given in this column\n", - "HHNK[\"gemaal\"] = HHNK[\"gemaal\"].loc[\n", - " HHNK[\"gemaal\"][\"functiegemaal\"] != \"niet meer in gebruik\"\n", - "] # filter the gemalen out which are not in use\n", - "HHNK[\"gemaal\"] = HHNK[\"gemaal\"].loc[\n", - " HHNK[\"gemaal\"][\"functiegemaal\"] != \"901\"\n", - "] # filter the gemalen out which 
are not in use\n", - "\n", - "# HHNK['gemaal'].loc[HHNK['gemaal'].functiegemaal.str.contains('onderbemaling|afvoer|af-'), 'func_afvoer'] = True\n", - "# HHNK['gemaal'].loc[HHNK['gemaal'].functiegemaal.str.contains('trekker|opmaling|op-|wateraanvoer|aanvoer'), 'func_aanvoer'] = True #aannamen: trekkerpompen vooral voor wateraanvoer\n", - "# HHNK['gemaal'].loc[HHNK['gemaal'].functiegemaal.str.contains('doorspoelpomp'), 'func_circulatie'] = True\n", - "\n", - "afvoer_values = [\"2\", \"4\", \"5\", \"6\", \"903\"]\n", - "aanvoer_values = [\"1\", \"3\", \"5\", \"902\", \"903\"] # aannamen: trekkerpompen vooral voor wateraanvoer\n", - "circulatie_values = [\"904\"]\n", - "\n", - "\n", - "HHNK[\"gemaal\"].loc[HHNK[\"gemaal\"][\"functiegemaal\"].isin(afvoer_values), \"func_afvoer\"] = True\n", - "HHNK[\"gemaal\"].loc[HHNK[\"gemaal\"][\"functiegemaal\"].isin(aanvoer_values), \"func_aanvoer\"] = True\n", - "HHNK[\"gemaal\"].loc[HHNK[\"gemaal\"][\"functiegemaal\"].isin(circulatie_values), \"func_circulatie\"] = True\n", - "\n", - "HHNK[\"gemaal\"].loc[\n", - " (HHNK[\"gemaal\"].func_afvoer is False)\n", - " & (HHNK[\"gemaal\"].func_aanvoer is False)\n", - " & (HHNK[\"gemaal\"].func_circulatie is False),\n", - " \"func_afvoer\",\n", - "] = True # set to afvoergemaal is there the function is unknown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "# gemaal\n", - "HHNK[\"gemaal\"].rename(columns={\"CODE\": \"code\", \"GLOBALID\": \"globalid\"}, inplace=True)\n", - "HHNK[\"gemaal\"][\"nen3610id\"] = \"dummy_nen3610id_\" + HHNK[\"gemaal\"].index.astype(\n", - " str\n", - ") # create a string as the globalid is usually a str as well\n", - "\n", - "# stuw\n", - "HHNK[\"stuw\"].rename(columns={\"CODE\": \"code\", \"GLOBALID\": \"globalid\"}, inplace=True)\n", - "HHNK[\"stuw\"][\"nen3610id\"] = \"dummy_nen3610id_\" + HHNK[\"stuw\"].index.astype(\n", - " str\n", - ") # create a string as the globalid is 
usually a str as well" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "### GPKG" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# discard irrelevant dataHHNK\n", - "HHNK[\"stuw\"] = HHNK[\"stuw\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "HHNK[\"gemaal\"] = HHNK[\"gemaal\"][\n", - " [\"code\", \"globalid\", \"nen3610id\", \"func_afvoer\", \"func_aanvoer\", \"func_circulatie\", \"geometry\"]\n", - "]\n", - "HHNK[\"hydroobject\"] = HHNK[\"hydroobject\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "HHNK[\"afsluitmiddel\"] = HHNK[\"afsluitmiddel\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "HHNK[\"duikersifonhevel\"] = HHNK[\"duikersifonhevel\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "### .GDB" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "data_gdb = data_gdb[[\"streefpeil\", \"geometry\"]]\n", - "data_gdb[\"globalid\"] = \"dummy_globalid_\" + data_gdb.index.astype(\n", - " str\n", - ") # create a string as the globalid is usually a str as well\n", - "streefpeil = data_gdb[[\"streefpeil\", \"globalid\"]]\n", - "peilgebied = data_gdb[[\"globalid\", \"geometry\"]]\n", - "\n", - "# add the data to the dictionary\n", - "HHNK[\"streefpeil\"] = streefpeil\n", - "HHNK[\"peilgebied\"] = peilgebied\n", - "\n", - "# hand made changes\n", - "HHNK[\"peilgebied\"] = HHNK[\"peilgebied\"][\n", - " HHNK[\"peilgebied\"][\"globalid\"] != 1725\n", - "] # not a correct shape. 
Basically only lines, with 36 seperate segments" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "HHNK[\"streefpeil\"] = HHNK[\"streefpeil\"].rename(columns={\"streefpeil\": \"waterhoogte\"})\n", - "HHNK[\"streefpeil\"][\"geometry\"] = None\n", - "HHNK[\"streefpeil\"] = gpd.GeoDataFrame(HHNK[\"streefpeil\"], geometry=\"geometry\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# HHNK['streefpeil']['code'] = 'dummy_code_streefpeil_' + HHNK['streefpeil'].index.astype(str)\n", - "# HHNK['streefpeil']['nen3610id'] = 'dummy_nen3610id_streefpeil_' + HHNK['streefpeil'].index.astype(str)\n", - "\n", - "HHNK[\"peilgebied\"][\"code\"] = \"dummy_code_\" + HHNK[\"peilgebied\"].index.astype(str)\n", - "HHNK[\"peilgebied\"][\"nen3610id\"] = \"dummy_nen3610id_\" + HHNK[\"peilgebied\"].index.astype(str)\n", - "HHNK[\"peilgebied\"][\"HWS_BZM\"] = False" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=HHNK)" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": { - "tags": [] - }, - "source": [ - "# Add the boezem and hoofdwatersysteem" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "Some changes by hand have been made. 
The resulting shapefile contains the bordering BZM and HWS shapes, including streefpeil" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "path_HWS_BZM = \"..\\..\\Scripts\\Aggregeren\\Hoofdwatersysteem\\BZM_HWS_HHNK.shp\"\n", - "HWS_BZM = gpd.read_file(path_HWS_BZM)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "HWS_BZM[\"code\"] = \"dummy_code_\" + (HWS_BZM.index + max(HHNK[\"peilgebied\"].index) + 1).astype(str)\n", - "HWS_BZM[\"globalid\"] = \"dummy_globalid_\" + (HWS_BZM.index + max(HHNK[\"peilgebied\"].index) + 1).astype(str)\n", - "HWS_BZM[\"nen3610id\"] = \"dummy_nen3610id_\" + (HWS_BZM.index + max(HHNK[\"peilgebied\"].index) + 1).astype(str)\n", - "HWS_BZM[\"waterhoogte\"] = HWS_BZM[\"zomerpeil\"]\n", - "HWS_BZM[\"HWS_BZM\"] = True\n", - "HWS_BZM = HWS_BZM[[\"code\", \"globalid\", \"nen3610id\", \"waterhoogte\", \"HWS_BZM\", \"geometry\"]]\n", - "\n", - "HWS_BZM_peilgebied = HWS_BZM[[\"code\", \"globalid\", \"nen3610id\", \"HWS_BZM\", \"geometry\"]]\n", - "HWS_BZM_streefpeil = HWS_BZM[[\"waterhoogte\", \"globalid\", \"geometry\"]]\n", - "\n", - "HHNK[\"peilgebied\"] = gpd.GeoDataFrame(pd.concat([HHNK[\"peilgebied\"], HWS_BZM_peilgebied])).reset_index(drop=True)\n", - "HHNK[\"streefpeil\"] = gpd.GeoDataFrame(pd.concat([HHNK[\"streefpeil\"], HWS_BZM_streefpeil])).reset_index(drop=True)" - ] - }, - { - "cell_type": "markdown", - "id": "20", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "store_data(waterschap=HHNK, output_gpkg_path=output_gpkg_path_HHNK)" - ] - }, - { - "cell_type": "raw", - "id": "22", - "metadata": {}, - "source": [ - "Toevoegen aan notities:\n", - "\n", - "Duikersifonhevel and hydroobject have a type of multicurvedZ, the peilgebieden a MultiSurfaceZ, 
which geopandas can not handle. I have manually exported these to single shapes, which automatically converts it to regular MultiStrings. Then these layers have been packed together to a geopackage again. \n", - "\n", - "Peilmerk is geometrisch gekoppeld aan peilgebieden, niet administratief. Daarnaast zijn bij een aantal beschikbaar of deze gekoppeld zijn met een gemaal, stuw, duikersifonhevel (wel administratief). Wel is er een streefpeil tabel beschikbaar, die wel administratief gekoppeld is. Ga kijken wat het verschil is.\n", - "\n", - "In de streefpeilen kaart zijn er verschillende soorten peilen:\n", - "- winter\n", - "- zomer\n", - "- vast\n", - "- dynamische bovengrens\n", - "- dynamische ondergrens" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - "name": "routing" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py new file mode 100644 index 0000000..afe2333 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py @@ -0,0 +1,190 @@ +# import packages and functions +import geopandas as gpd +import numpy as np +import pandas as pd +from general_functions import * + +# # Hollands Noorderkwartier + + +# define relative paths +gpkg_path_HHNK = "../../Data_preprocessed/Waterschappen/HHNK/Noorderkwartier.gpkg" +gdb_path_HHNK = "../../Data_preprocessed/Waterschappen/HHNK/Watersysteemanalyse_BWN2.gdb" +gdb_path_HHNK_nalevering = "../../Data_preprocessed/Waterschappen/HHNK/Na_levering_HHNK_gemalen_stuwen_20240321.gdb" +output_gpkg_path_HHNK = 
"../../Data_postprocessed/Waterschappen/HHNK/Noorderkwartier" +interim_results = "../../Interim_results/Waterschappen/HHNK/Noorderkwartier_IR" + + +# retrieve the data +HHNK = read_gpkg_layers( + gpkg_path=gpkg_path_HHNK, + variables=[ + # 'stuw', #nalevering + # 'gemaal', #nalevering + "afsluitmiddel", + "hydroobject", + "duikersifonhevel", + ], +) +# 'peilafwijkinggebied', +# 'peilgebiedpraktijk', +# 'pomp']) +# 'streefpeil']) + +# retrieve data from a gdb, as the gpkg of HHNK does not contain all relevant data +data_gdb = gpd.read_file(gdb_path_HHNK, layer="BWN_ruimtekaart") +HHNK_nalevering = read_gpkg_layers(gpkg_path=gdb_path_HHNK_nalevering, variables=["stuw", "gemaal"]) # nalevering + +HHNK["stuw"] = HHNK_nalevering["stuw"] +HHNK["gemaal"] = HHNK_nalevering["gemaal"] + + +# determine aanvoer en afvoer gemalen +HHNK["gemaal"]["func_aanvoer"], HHNK["gemaal"]["func_afvoer"], HHNK["gemaal"]["func_circulatie"] = ( + False, + False, + False, +) # default is False +HHNK["gemaal"]["functiegemaal"] = HHNK["gemaal"]["FUNCTIEGEMAAL"].astype(str) +HHNK["gemaal"].loc[HHNK["gemaal"]["functiegemaal"] == "onbekend", "functiegemaal"] = ( + np.nan +) # replace onbekend with nan, will be filled up later see one line below +HHNK["gemaal"].loc[HHNK["gemaal"]["functiegemaal"] == "99", "functiegemaal"] = ( + np.nan +) # replace onbekend with nan, will be filled up later see one line below +HHNK["gemaal"]["functiegemaal"].fillna( + HHNK["gemaal"]["OPMERKING"], inplace=True +) # some additional is given in this column +HHNK["gemaal"] = HHNK["gemaal"].loc[ + HHNK["gemaal"]["functiegemaal"] != "niet meer in gebruik" +] # filter the gemalen out which are not in use +HHNK["gemaal"] = HHNK["gemaal"].loc[ + HHNK["gemaal"]["functiegemaal"] != "901" +] # filter the gemalen out which are not in use + +# HHNK['gemaal'].loc[HHNK['gemaal'].functiegemaal.str.contains('onderbemaling|afvoer|af-'), 'func_afvoer'] = True +# 
HHNK['gemaal'].loc[HHNK['gemaal'].functiegemaal.str.contains('trekker|opmaling|op-|wateraanvoer|aanvoer'), 'func_aanvoer'] = True #aannamen: trekkerpompen vooral voor wateraanvoer +# HHNK['gemaal'].loc[HHNK['gemaal'].functiegemaal.str.contains('doorspoelpomp'), 'func_circulatie'] = True + +afvoer_values = ["2", "4", "5", "6", "903"] +aanvoer_values = ["1", "3", "5", "902", "903"] # aannamen: trekkerpompen vooral voor wateraanvoer +circulatie_values = ["904"] + + +HHNK["gemaal"].loc[HHNK["gemaal"]["functiegemaal"].isin(afvoer_values), "func_afvoer"] = True +HHNK["gemaal"].loc[HHNK["gemaal"]["functiegemaal"].isin(aanvoer_values), "func_aanvoer"] = True +HHNK["gemaal"].loc[HHNK["gemaal"]["functiegemaal"].isin(circulatie_values), "func_circulatie"] = True + +HHNK["gemaal"].loc[ + (HHNK["gemaal"].func_afvoer is False) + & (HHNK["gemaal"].func_aanvoer is False) + & (HHNK["gemaal"].func_circulatie is False), + "func_afvoer", +] = True # set to afvoergemaal is there the function is unknown + + +# gemaal +HHNK["gemaal"].rename(columns={"CODE": "code", "GLOBALID": "globalid"}, inplace=True) +HHNK["gemaal"]["nen3610id"] = "dummy_nen3610id_" + HHNK["gemaal"].index.astype( + str +) # create a string as the globalid is usually a str as well + +# stuw +HHNK["stuw"].rename(columns={"CODE": "code", "GLOBALID": "globalid"}, inplace=True) +HHNK["stuw"]["nen3610id"] = "dummy_nen3610id_" + HHNK["stuw"].index.astype( + str +) # create a string as the globalid is usually a str as well + + +# ### GPKG + + +# discard irrelevant dataHHNK +HHNK["stuw"] = HHNK["stuw"][["code", "globalid", "nen3610id", "geometry"]] +HHNK["gemaal"] = HHNK["gemaal"][ + ["code", "globalid", "nen3610id", "func_afvoer", "func_aanvoer", "func_circulatie", "geometry"] +] +HHNK["hydroobject"] = HHNK["hydroobject"][["code", "globalid", "nen3610id", "geometry"]] +HHNK["afsluitmiddel"] = HHNK["afsluitmiddel"][["code", "globalid", "nen3610id", "geometry"]] +HHNK["duikersifonhevel"] = HHNK["duikersifonhevel"][["code", 
"globalid", "nen3610id", "geometry"]] + + +# ### .GDB + + +data_gdb = data_gdb[["streefpeil", "geometry"]] +data_gdb["globalid"] = "dummy_globalid_" + data_gdb.index.astype( + str +) # create a string as the globalid is usually a str as well +streefpeil = data_gdb[["streefpeil", "globalid"]] +peilgebied = data_gdb[["globalid", "geometry"]] + +# add the data to the dictionary +HHNK["streefpeil"] = streefpeil +HHNK["peilgebied"] = peilgebied + +# hand made changes +HHNK["peilgebied"] = HHNK["peilgebied"][ + HHNK["peilgebied"]["globalid"] != 1725 +] # not a correct shape. Basically only lines, with 36 seperate segments + + +HHNK["streefpeil"] = HHNK["streefpeil"].rename(columns={"streefpeil": "waterhoogte"}) +HHNK["streefpeil"]["geometry"] = None +HHNK["streefpeil"] = gpd.GeoDataFrame(HHNK["streefpeil"], geometry="geometry") + + +# HHNK['streefpeil']['code'] = 'dummy_code_streefpeil_' + HHNK['streefpeil'].index.astype(str) +# HHNK['streefpeil']['nen3610id'] = 'dummy_nen3610id_streefpeil_' + HHNK['streefpeil'].index.astype(str) + +HHNK["peilgebied"]["code"] = "dummy_code_" + HHNK["peilgebied"].index.astype(str) +HHNK["peilgebied"]["nen3610id"] = "dummy_nen3610id_" + HHNK["peilgebied"].index.astype(str) +HHNK["peilgebied"]["HWS_BZM"] = False + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=HHNK) + + +# # Add the boezem and hoofdwatersysteem + +# Some changes by hand have been made. 
The resulting shapefile contains the bordering BZM and HWS shapes, including streefpeil + + +path_HWS_BZM = "..\..\Scripts\Aggregeren\Hoofdwatersysteem\BZM_HWS_HHNK.shp" +HWS_BZM = gpd.read_file(path_HWS_BZM) + + +HWS_BZM["code"] = "dummy_code_" + (HWS_BZM.index + max(HHNK["peilgebied"].index) + 1).astype(str) +HWS_BZM["globalid"] = "dummy_globalid_" + (HWS_BZM.index + max(HHNK["peilgebied"].index) + 1).astype(str) +HWS_BZM["nen3610id"] = "dummy_nen3610id_" + (HWS_BZM.index + max(HHNK["peilgebied"].index) + 1).astype(str) +HWS_BZM["waterhoogte"] = HWS_BZM["zomerpeil"] +HWS_BZM["HWS_BZM"] = True +HWS_BZM = HWS_BZM[["code", "globalid", "nen3610id", "waterhoogte", "HWS_BZM", "geometry"]] + +HWS_BZM_peilgebied = HWS_BZM[["code", "globalid", "nen3610id", "HWS_BZM", "geometry"]] +HWS_BZM_streefpeil = HWS_BZM[["waterhoogte", "globalid", "geometry"]] + +HHNK["peilgebied"] = gpd.GeoDataFrame(pd.concat([HHNK["peilgebied"], HWS_BZM_peilgebied])).reset_index(drop=True) +HHNK["streefpeil"] = gpd.GeoDataFrame(pd.concat([HHNK["streefpeil"], HWS_BZM_streefpeil])).reset_index(drop=True) + + +# ### Store data + + +store_data(waterschap=HHNK, output_gpkg_path=output_gpkg_path_HHNK) + +# Toevoegen aan notities: + +# Duikersifonhevel and hydroobject have a type of multicurvedZ, the peilgebieden a MultiSurfaceZ, which geopandas can not handle. I have manually exported these to single shapes, which automatically converts it to regular MultiStrings. Then these layers have been packed together to a geopackage again. + +# Peilmerk is geometrisch gekoppeld aan peilgebieden, niet administratief. Daarnaast zijn bij een aantal beschikbaar of deze gekoppeld zijn met een gemaal, stuw, duikersifonhevel (wel administratief). Wel is er een streefpeil tabel beschikbaar, die wel administratief gekoppeld is. Ga kijken wat het verschil is. 
+ +# In de streefpeilen kaart zijn er verschillende soorten peilen: +# - winter +# - zomer +# - vast +# - dynamische bovengrens +# - dynamische ondergrens diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.ipynb deleted file mode 100644 index a336f57..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.ipynb +++ /dev/null @@ -1,560 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import os\n", - "\n", - "import geopandas as gpd\n", - "import numpy as np\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": { - "tags": [ - "test" - ] - }, - "outputs": [], - "source": [ - "from general_functions import *" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "pd.set_option(\"display.max_columns\", None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"HHSK\"\n", - "path_HHSK = \"..\\..\\Data_preprocessed\\Waterschappen\\HHSK\\HyDamo_metWasverzachter_20230905.gpkg\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/HHSK\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK = read_gpkg_layers(\n", - " gpkg_path=path_HHSK,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"afsluitmiddel\",\n", - " \"duikersifonhevel\",\n", - " \"hydroobject\",\n", - " # 'peilgebiedvigerend',\n", - " # 'peilafwijkinggebied',\n", - " # 'peilbesluitgebied',\n", - " \"streefpeil\",\n", - " ],\n", - " engine=\"pyogrio\",\n", - ")" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK_nalevering = read_gpkg_layers(\n", - " gpkg_path=r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\HHSK\\LHM_hhsk_nalevering.gpkg\",\n", - " variables=[\"Persleiding\"],\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK_2nalevering = read_gpkg_layers(\n", - " gpkg_path=r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\HHSK\\hhsklevering_07032024.gpkg\",\n", - " variables=[\"PeilgebiedPraktijk\", \"VigerendPeilgebiedZPP\"],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "## Only select status_object == 3" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"stuw\"] = HHSK[\"stuw\"].loc[(HHSK[\"stuw\"][\"statusobject\"] == \"3\") | (HHSK[\"stuw\"][\"statusobject\"] == 3)]\n", - "HHSK[\"gemaal\"] = HHSK[\"gemaal\"].loc[(HHSK[\"gemaal\"][\"statusobject\"] == \"3\") | (HHSK[\"gemaal\"][\"statusobject\"] == 3)]\n", - "HHSK[\"hydroobject\"] = HHSK[\"hydroobject\"].loc[\n", - " (HHSK[\"hydroobject\"][\"statusobject\"] == \"3\") | (HHSK[\"hydroobject\"][\"statusobject\"] == 3)\n", - "]\n", - "HHSK[\"duikersifonhevel\"] = HHSK[\"duikersifonhevel\"].loc[\n", - " (HHSK[\"duikersifonhevel\"][\"statusobject\"] == \"3\") | (HHSK[\"duikersifonhevel\"][\"statusobject\"] == 3)\n", - "]\n", - "HHSK[\"afsluitmiddel\"] = HHSK[\"afsluitmiddel\"].loc[\n", - " (HHSK[\"afsluitmiddel\"][\"statusobject\"] == \"3\") | (HHSK[\"afsluitmiddel\"][\"statusobject\"] == 3)\n", - "]\n", - "\n", - "# HHSK['peilgebiedvigerend'] = HHSK['peilgebiedvigerend'].loc[(HHSK['peilgebiedvigerend']['statusobject'] == '3') | (HHSK['peilgebiedvigerend']['statusobject'] == 3)]\n", - "# HHSK['peilafwijkinggebied'] = 
HHSK['peilafwijkinggebied'].loc[(HHSK['peilafwijkinggebied']['statusobject'] == '3') | (HHSK['peilafwijkinggebied']['statusobject'] == 3)]\n", - "\n", - "HHSK_nalevering[\"Persleiding\"] = HHSK_nalevering[\"Persleiding\"].loc[\n", - " (HHSK_nalevering[\"Persleiding\"][\"STATUSOBJECT\"] == \"3\") | (HHSK_nalevering[\"Persleiding\"][\"STATUSOBJECT\"] == 3)\n", - "]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"hydroobject\"][\"categorieoppwaterlichaam\"] = HHSK[\"hydroobject\"][\"categorieoppwaterlichaam\"].astype(float)\n", - "HHSK[\"hydroobject\"] = HHSK[\"hydroobject\"].loc[\n", - " HHSK[\"hydroobject\"][\"categorieoppwaterlichaam\"] == 1\n", - "] # only select primaire waterlichamen" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "# HHSK" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": { - "tags": [] - }, - "source": [ - "### Add the gemalen columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# determine aanvoer en afvoer gemalen\n", - "HHSK[\"gemaal\"][\"func_aanvoer\"], HHSK[\"gemaal\"][\"func_afvoer\"], HHSK[\"gemaal\"][\"func_circulatie\"] = (\n", - " False,\n", - " False,\n", - " False,\n", - ") # default is False\n", - "HHSK[\"gemaal\"][\"functiegemaal\"] = HHSK[\"gemaal\"][\"functiegemaal\"].astype(str)\n", - "\n", - "HHSK[\"gemaal\"].loc[HHSK[\"gemaal\"].functiegemaal.str.contains(\"2|4|5|6|7|99\"), \"func_afvoer\"] = True\n", - "HHSK[\"gemaal\"].loc[HHSK[\"gemaal\"].functiegemaal.str.contains(\"1|3|5|\"), \"func_aanvoer\"] = True\n", - "HHSK[\"gemaal\"].loc[HHSK[\"gemaal\"].functiegemaal.str.contains(\"8\"), \"func_circulatie\"] = True\n", - "HHSK[\"gemaal\"].loc[\n", - " (HHSK[\"gemaal\"].func_afvoer is False)\n", - " & (HHSK[\"gemaal\"].func_aanvoer is False)\n", - " & (HHSK[\"gemaal\"].func_circulatie is False),\n", - 
" \"func_afvoer\",\n", - "] = True # set to afvoergemaal is there the function is unknown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"stuw\"] = HHSK[\"stuw\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "HHSK[\"gemaal\"] = HHSK[\"gemaal\"][\n", - " [\"code\", \"nen3610id\", \"globalid\", \"func_afvoer\", \"func_aanvoer\", \"func_circulatie\", \"geometry\"]\n", - "]\n", - "HHSK[\"hydroobject\"] = HHSK[\"hydroobject\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "HHSK[\"duikersifonhevel\"] = HHSK[\"duikersifonhevel\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "HHSK[\"afsluitmiddel\"] = HHSK[\"afsluitmiddel\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "\n", - "# HHSK['peilgebiedvigerend'] = HHSK['peilgebiedvigerend'][['code', 'nen3610id', 'globalid', 'geometry']]\n", - "# HHSK['peilafwijkinggebied'] = HHSK['peilafwijkinggebied'][['code', 'nen3610id', 'globalid', 'geometry']]\n", - "\n", - "# HHSK['streefpeil'] = HHSK['streefpeil'][['code', 'nen3610id', 'globalid']]" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": { - "tags": [] - }, - "source": [ - "### Add the nageleverde persleidingen to the duikersifonhevels" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK_nalevering[\"Persleiding\"] = HHSK_nalevering[\"Persleiding\"][[\"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "HHSK_nalevering[\"Persleiding\"][\"nen3610id\"] = \"dummy_nen3610id_persleiding_\" + HHSK_nalevering[\n", - " \"Persleiding\"\n", - "].index.astype(str)\n", - "HHSK_nalevering[\"Persleiding\"] = HHSK_nalevering[\"Persleiding\"].rename(columns={\"CODE\": \"code\", \"GLOBALID\": \"globalid\"})" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"duikersifonhevel\"] = gpd.GeoDataFrame(\n", - " pd.concat([HHSK[\"duikersifonhevel\"], HHSK_nalevering[\"Persleiding\"]])\n", - ").reset_index(drop=True)\n", - "\n", - "# HHSK has apparently non unique values for the global ids in duikersifonhevel\n", - "HHSK[\"duikersifonhevel\"][\"globalid\"] = \"dummy_globalid_duikersifonhevel_\" + HHSK[\"duikersifonhevel\"].index.astype(str)\n", - "\n", - "# remove empty point geomtries from the gemalen\n", - "HHSK[\"gemaal\"] = HHSK[\"gemaal\"][~HHSK[\"gemaal\"].is_empty].reset_index(drop=True)" - ] - }, - { - "cell_type": "markdown", - "id": "18", - "metadata": {}, - "source": [ - "### Add the nageleverde peilgebieden to the original data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "# edit the peilgebiedpraktijk\n", - "HHSK_2nalevering[\"PeilgebiedPraktijk\"][\"ONDERPEIL\"].fillna(\n", - " HHSK_2nalevering[\"PeilgebiedPraktijk\"][\"VASTPEIL\"], inplace=True\n", - ")\n", - "HHSK_2nalevering[\"PeilgebiedPraktijk\"][\"ONDERPEIL\"].fillna(\n", - " HHSK_2nalevering[\"PeilgebiedPraktijk\"][\"BOVENPEIL\"], inplace=True\n", - ")\n", - "HHSK_2nalevering[\"PeilgebiedPraktijk\"][\"ONDERPEIL\"].fillna(\n", - " HHSK_2nalevering[\"PeilgebiedPraktijk\"][\"SCHOUWPEIL\"], inplace=True\n", - ")\n", - "\n", - "HHSK[\"peilgebiedpraktijk\"] = HHSK_2nalevering[\"PeilgebiedPraktijk\"][[\"CODE\", \"ONDERPEIL\", \"BRONGID\", \"geometry\"]]\n", - "HHSK[\"peilgebiedpraktijk\"] = HHSK_2nalevering[\"PeilgebiedPraktijk\"].rename(\n", - " columns={\"CODE\": \"code\", \"ONDERPEIL\": \"waterhoogte\", \"BRONGID\": \"globalid\"}\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# edit the peilgebiedvigerend\n", - 
"HHSK_2nalevering[\"VigerendPeilgebiedZPP\"][\"ONDERPEIL\"].fillna(\n", - " HHSK_2nalevering[\"VigerendPeilgebiedZPP\"][\"VASTPEIL\"], inplace=True\n", - ")\n", - "HHSK_2nalevering[\"VigerendPeilgebiedZPP\"][\"ONDERPEIL\"].fillna(\n", - " HHSK_2nalevering[\"VigerendPeilgebiedZPP\"][\"BOVENPEIL\"], inplace=True\n", - ")\n", - "HHSK_2nalevering[\"VigerendPeilgebiedZPP\"][\"ONDERPEIL\"].fillna(\n", - " HHSK_2nalevering[\"VigerendPeilgebiedZPP\"][\"SCHOUWPEIL\"], inplace=True\n", - ")\n", - "\n", - "HHSK[\"peilgebiedvigerend\"] = HHSK_2nalevering[\"VigerendPeilgebiedZPP\"][[\"CODE\", \"ONDERPEIL\", \"BRONGID\", \"geometry\"]]\n", - "HHSK[\"peilgebiedvigerend\"] = HHSK_2nalevering[\"VigerendPeilgebiedZPP\"].rename(\n", - " columns={\"CODE\": \"code\", \"ONDERPEIL\": \"waterhoogte\", \"BRONGID\": \"globalid\"}\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "# select the peilgebiedpraktijk which have a primaire hydroobject\n", - "\n", - "# pgp_pw : PeilGebiedPraktijk Primaire Watergang\n", - "pgp_pw = gpd.sjoin(\n", - " left_df=HHSK_2nalevering[\"PeilgebiedPraktijk\"],\n", - " right_df=HHSK[\"hydroobject\"],\n", - " how=\"inner\",\n", - " predicate=\"contains\",\n", - " lsuffix=\"pgp\",\n", - " rsuffix=\"hydroobject\",\n", - ").drop_duplicates(subset=\"CODE\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "pgp_pw.plot()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "pgv_diff = gpd.sjoin(left_df=HHSK[\"peilgebiedvigerend\"], right_df=pgp_pw, how=\"inner\", predicate=\"intersects\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "pgv_diff.plot()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - 
"outputs": [], - "source": [ - "# streefpeilen_PG_v = pd.merge(left = HHSK['peilgebiedvigerend'],\n", - "# right = HHSK['streefpeil'],\n", - "# left_on = 'globalid',\n", - "# right_on = 'peilgebiedvigerendid',\n", - "# suffixes = ('', '_streefpeil'))[['code', 'nen3610id', 'globalid', 'waterhoogte', 'geometry']]\n", - "\n", - "# streefpeilen_PG_a = pd.merge(left = HHSK['peilafwijkinggebied'],\n", - "# right = HHSK['streefpeil'],\n", - "# left_on = 'globalid',\n", - "# right_on = 'peilafwijkinggebiedid',\n", - "# suffixes = ('', '_streefpeil'))[['code', 'nen3610id', 'globalid', 'waterhoogte', 'geometry']]\n", - "\n", - "# fig, ax = plt.subplots()\n", - "# streefpeilen_PG_v.plot(ax = ax, color='cornflowerblue')\n", - "# streefpeilen_PG_a.plot(ax = ax, color='blue')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "# streefpeilen_PG_v.to_file('vigerend.shp')\n", - "# streefpeilen_PG_a.to_file('afwijking.shp')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "peilgebied = burn_in_peilgebieden(base_layer=streefpeilen_PG_v, overlay_layer=streefpeilen_PG_a, plot=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "streefpeil = gpd.GeoDataFrame()\n", - "streefpeil[\"waterhoogte\"] = peilgebied[\"waterhoogte\"]\n", - "streefpeil[\"globalid\"] = peilgebied[\"globalid\"]\n", - "streefpeil[\"geometry\"] = np.nan\n", - "HHSK[\"streefpeil\"] = gpd.GeoDataFrame(streefpeil)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [ - "HHSK[\"peilgebied\"] = gpd.GeoDataFrame(peilgebied[[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - 
"HHSK[\"peilgebied\"] = HHSK[\"peilgebied\"].loc[HHSK[\"peilgebied\"][\"code\"] != \"GPG-1206\"] # Not a regular polygon" - ] - }, - { - "cell_type": "markdown", - "id": "31", - "metadata": {}, - "source": [ - "### Delete irrelevant data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": {}, - "outputs": [], - "source": [ - "# delete irrelvant data\n", - "variables = [\"peilgebiedpraktijk\", \"peilgebiedvigerend\", \"peilafwijkinggebied\"]\n", - "\n", - "for variable in variables:\n", - " if str(variable) in HHSK:\n", - " del HHSK[variable]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "# add duikersifonhevels to the hydroobjecten\n", - "HHSK[\"hydroobject\"] = pd.concat([HHSK[\"hydroobject\"], HHSK[\"duikersifonhevel\"]])\n", - "HHSK[\"hydroobject\"] = HHSK[\"hydroobject\"].drop_duplicates(subset=\"globalid\") # in case it is run multiple times\n", - "HHSK[\"hydroobject\"] = gpd.GeoDataFrame(HHSK[\"hydroobject\"]).set_crs(\"epsg:28992\")" - ] - }, - { - "cell_type": "markdown", - "id": "34", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=HHSK)" - ] - }, - { - "cell_type": "markdown", - "id": "36", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37", - "metadata": {}, - "outputs": [], - "source": [ - "# Check if the directory exists\n", - "if not os.path.exists(output_gpkg_path):\n", - " # If it doesn't exist, create it\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=HHSK, output_gpkg_path=output_gpkg_path + \"/HHSK\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38", - "metadata": {}, - "outputs": [], - "source": [] - } - 
], - "metadata": { - "kernelspec": { - "display_name": "Python [conda env:ribasim]", - "language": "python", - "name": "conda-env-ribasim-py" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py new file mode 100644 index 0000000..55bec86 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py @@ -0,0 +1,262 @@ +# import packages and functions +import os + +import geopandas as gpd +import numpy as np +import pandas as pd +from general_functions import * + +pd.set_option("display.max_columns", None) + + +# define relative paths +waterschap = "HHSK" +path_HHSK = "..\..\Data_preprocessed\Waterschappen\HHSK\HyDamo_metWasverzachter_20230905.gpkg" +output_gpkg_path = "../../Data_postprocessed/Waterschappen/HHSK" + + +HHSK = read_gpkg_layers( + gpkg_path=path_HHSK, + variables=[ + "stuw", + "gemaal", + "afsluitmiddel", + "duikersifonhevel", + "hydroobject", + # 'peilgebiedvigerend', + # 'peilafwijkinggebied', + # 'peilbesluitgebied', + "streefpeil", + ], + engine="pyogrio", +) + + +HHSK_nalevering = read_gpkg_layers( + gpkg_path=r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\HHSK\LHM_hhsk_nalevering.gpkg", + variables=["Persleiding"], +) + + +HHSK_2nalevering = read_gpkg_layers( + gpkg_path=r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\HHSK\hhsklevering_07032024.gpkg", + variables=["PeilgebiedPraktijk", "VigerendPeilgebiedZPP"], +) + + +# ## Only select status_object == 3 + + +HHSK["stuw"] = HHSK["stuw"].loc[(HHSK["stuw"]["statusobject"] == "3") | (HHSK["stuw"]["statusobject"] == 3)] +HHSK["gemaal"] = 
HHSK["gemaal"].loc[(HHSK["gemaal"]["statusobject"] == "3") | (HHSK["gemaal"]["statusobject"] == 3)] +HHSK["hydroobject"] = HHSK["hydroobject"].loc[ + (HHSK["hydroobject"]["statusobject"] == "3") | (HHSK["hydroobject"]["statusobject"] == 3) +] +HHSK["duikersifonhevel"] = HHSK["duikersifonhevel"].loc[ + (HHSK["duikersifonhevel"]["statusobject"] == "3") | (HHSK["duikersifonhevel"]["statusobject"] == 3) +] +HHSK["afsluitmiddel"] = HHSK["afsluitmiddel"].loc[ + (HHSK["afsluitmiddel"]["statusobject"] == "3") | (HHSK["afsluitmiddel"]["statusobject"] == 3) +] + +# HHSK['peilgebiedvigerend'] = HHSK['peilgebiedvigerend'].loc[(HHSK['peilgebiedvigerend']['statusobject'] == '3') | (HHSK['peilgebiedvigerend']['statusobject'] == 3)] +# HHSK['peilafwijkinggebied'] = HHSK['peilafwijkinggebied'].loc[(HHSK['peilafwijkinggebied']['statusobject'] == '3') | (HHSK['peilafwijkinggebied']['statusobject'] == 3)] + +HHSK_nalevering["Persleiding"] = HHSK_nalevering["Persleiding"].loc[ + (HHSK_nalevering["Persleiding"]["STATUSOBJECT"] == "3") | (HHSK_nalevering["Persleiding"]["STATUSOBJECT"] == 3) +] + + +HHSK["hydroobject"]["categorieoppwaterlichaam"] = HHSK["hydroobject"]["categorieoppwaterlichaam"].astype(float) +HHSK["hydroobject"] = HHSK["hydroobject"].loc[ + HHSK["hydroobject"]["categorieoppwaterlichaam"] == 1 +] # only select primaire waterlichamen + + +# # HHSK + +# ### Add the gemalen columns + + +# determine aanvoer en afvoer gemalen +HHSK["gemaal"]["func_aanvoer"], HHSK["gemaal"]["func_afvoer"], HHSK["gemaal"]["func_circulatie"] = ( + False, + False, + False, +) # default is False +HHSK["gemaal"]["functiegemaal"] = HHSK["gemaal"]["functiegemaal"].astype(str) + +HHSK["gemaal"].loc[HHSK["gemaal"].functiegemaal.str.contains("2|4|5|6|7|99"), "func_afvoer"] = True +HHSK["gemaal"].loc[HHSK["gemaal"].functiegemaal.str.contains("1|3|5|"), "func_aanvoer"] = True +HHSK["gemaal"].loc[HHSK["gemaal"].functiegemaal.str.contains("8"), "func_circulatie"] = True +HHSK["gemaal"].loc[ + 
(HHSK["gemaal"].func_afvoer is False) + & (HHSK["gemaal"].func_aanvoer is False) + & (HHSK["gemaal"].func_circulatie is False), + "func_afvoer", +] = True # set to afvoergemaal is there the function is unknown + + +HHSK["stuw"] = HHSK["stuw"][["code", "nen3610id", "globalid", "geometry"]] +HHSK["gemaal"] = HHSK["gemaal"][ + ["code", "nen3610id", "globalid", "func_afvoer", "func_aanvoer", "func_circulatie", "geometry"] +] +HHSK["hydroobject"] = HHSK["hydroobject"][["code", "nen3610id", "globalid", "geometry"]] +HHSK["duikersifonhevel"] = HHSK["duikersifonhevel"][["code", "nen3610id", "globalid", "geometry"]] +HHSK["afsluitmiddel"] = HHSK["afsluitmiddel"][["code", "nen3610id", "globalid", "geometry"]] + +# HHSK['peilgebiedvigerend'] = HHSK['peilgebiedvigerend'][['code', 'nen3610id', 'globalid', 'geometry']] +# HHSK['peilafwijkinggebied'] = HHSK['peilafwijkinggebied'][['code', 'nen3610id', 'globalid', 'geometry']] + +# HHSK['streefpeil'] = HHSK['streefpeil'][['code', 'nen3610id', 'globalid']] + + +# ### Add the nageleverde persleidingen to the duikersifonhevels + + +HHSK_nalevering["Persleiding"] = HHSK_nalevering["Persleiding"][["CODE", "GLOBALID", "geometry"]] +HHSK_nalevering["Persleiding"]["nen3610id"] = "dummy_nen3610id_persleiding_" + HHSK_nalevering[ + "Persleiding" +].index.astype(str) +HHSK_nalevering["Persleiding"] = HHSK_nalevering["Persleiding"].rename(columns={"CODE": "code", "GLOBALID": "globalid"}) + + +HHSK["duikersifonhevel"] = gpd.GeoDataFrame( + pd.concat([HHSK["duikersifonhevel"], HHSK_nalevering["Persleiding"]]) +).reset_index(drop=True) + +# HHSK has apparently non unique values for the global ids in duikersifonhevel +HHSK["duikersifonhevel"]["globalid"] = "dummy_globalid_duikersifonhevel_" + HHSK["duikersifonhevel"].index.astype(str) + +# remove empty point geomtries from the gemalen +HHSK["gemaal"] = HHSK["gemaal"][~HHSK["gemaal"].is_empty].reset_index(drop=True) + + +# ### Add the nageleverde peilgebieden to the original data + + +# edit the 
peilgebiedpraktijk +HHSK_2nalevering["PeilgebiedPraktijk"]["ONDERPEIL"].fillna( + HHSK_2nalevering["PeilgebiedPraktijk"]["VASTPEIL"], inplace=True +) +HHSK_2nalevering["PeilgebiedPraktijk"]["ONDERPEIL"].fillna( + HHSK_2nalevering["PeilgebiedPraktijk"]["BOVENPEIL"], inplace=True +) +HHSK_2nalevering["PeilgebiedPraktijk"]["ONDERPEIL"].fillna( + HHSK_2nalevering["PeilgebiedPraktijk"]["SCHOUWPEIL"], inplace=True +) + +HHSK["peilgebiedpraktijk"] = HHSK_2nalevering["PeilgebiedPraktijk"][["CODE", "ONDERPEIL", "BRONGID", "geometry"]] +HHSK["peilgebiedpraktijk"] = HHSK_2nalevering["PeilgebiedPraktijk"].rename( + columns={"CODE": "code", "ONDERPEIL": "waterhoogte", "BRONGID": "globalid"} +) + + +# edit the peilgebiedvigerend +HHSK_2nalevering["VigerendPeilgebiedZPP"]["ONDERPEIL"].fillna( + HHSK_2nalevering["VigerendPeilgebiedZPP"]["VASTPEIL"], inplace=True +) +HHSK_2nalevering["VigerendPeilgebiedZPP"]["ONDERPEIL"].fillna( + HHSK_2nalevering["VigerendPeilgebiedZPP"]["BOVENPEIL"], inplace=True +) +HHSK_2nalevering["VigerendPeilgebiedZPP"]["ONDERPEIL"].fillna( + HHSK_2nalevering["VigerendPeilgebiedZPP"]["SCHOUWPEIL"], inplace=True +) + +HHSK["peilgebiedvigerend"] = HHSK_2nalevering["VigerendPeilgebiedZPP"][["CODE", "ONDERPEIL", "BRONGID", "geometry"]] +HHSK["peilgebiedvigerend"] = HHSK_2nalevering["VigerendPeilgebiedZPP"].rename( + columns={"CODE": "code", "ONDERPEIL": "waterhoogte", "BRONGID": "globalid"} +) + + +# select the peilgebiedpraktijk which have a primaire hydroobject + +# pgp_pw : PeilGebiedPraktijk Primaire Watergang +pgp_pw = gpd.sjoin( + left_df=HHSK_2nalevering["PeilgebiedPraktijk"], + right_df=HHSK["hydroobject"], + how="inner", + predicate="contains", + lsuffix="pgp", + rsuffix="hydroobject", +).drop_duplicates(subset="CODE") + + +pgp_pw.plot() + + +pgv_diff = gpd.sjoin(left_df=HHSK["peilgebiedvigerend"], right_df=pgp_pw, how="inner", predicate="intersects") + + +pgv_diff.plot() + + +# streefpeilen_PG_v = pd.merge(left = HHSK['peilgebiedvigerend'], +# right = 
HHSK['streefpeil'], +# left_on = 'globalid', +# right_on = 'peilgebiedvigerendid', +# suffixes = ('', '_streefpeil'))[['code', 'nen3610id', 'globalid', 'waterhoogte', 'geometry']] + +# streefpeilen_PG_a = pd.merge(left = HHSK['peilafwijkinggebied'], +# right = HHSK['streefpeil'], +# left_on = 'globalid', +# right_on = 'peilafwijkinggebiedid', +# suffixes = ('', '_streefpeil'))[['code', 'nen3610id', 'globalid', 'waterhoogte', 'geometry']] + +# fig, ax = plt.subplots() +# streefpeilen_PG_v.plot(ax = ax, color='cornflowerblue') +# streefpeilen_PG_a.plot(ax = ax, color='blue') + + +# streefpeilen_PG_v.to_file('vigerend.shp') +# streefpeilen_PG_a.to_file('afwijking.shp') + + +peilgebied = burn_in_peilgebieden(base_layer=streefpeilen_PG_v, overlay_layer=streefpeilen_PG_a, plot=True) + + +streefpeil = gpd.GeoDataFrame() +streefpeil["waterhoogte"] = peilgebied["waterhoogte"] +streefpeil["globalid"] = peilgebied["globalid"] +streefpeil["geometry"] = np.nan +HHSK["streefpeil"] = gpd.GeoDataFrame(streefpeil) + + +HHSK["peilgebied"] = gpd.GeoDataFrame(peilgebied[["code", "nen3610id", "globalid", "geometry"]]) + + +HHSK["peilgebied"] = HHSK["peilgebied"].loc[HHSK["peilgebied"]["code"] != "GPG-1206"] # Not a regular polygon + + +# ### Delete irrelevant data + + +# delete irrelvant data +variables = ["peilgebiedpraktijk", "peilgebiedvigerend", "peilafwijkinggebied"] + +for variable in variables: + if str(variable) in HHSK: + del HHSK[variable] + + +# add duikersifonhevels to the hydroobjecten +HHSK["hydroobject"] = pd.concat([HHSK["hydroobject"], HHSK["duikersifonhevel"]]) +HHSK["hydroobject"] = HHSK["hydroobject"].drop_duplicates(subset="globalid") # in case it is run multiple times +HHSK["hydroobject"] = gpd.GeoDataFrame(HHSK["hydroobject"]).set_crs("epsg:28992") + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=HHSK) + + +# ### Store data + + +# Check if the directory exists +if not os.path.exists(output_gpkg_path): + # If it doesn't 
exist, create it + os.makedirs(output_gpkg_path) + +store_data(waterschap=HHSK, output_gpkg_path=output_gpkg_path + "/HHSK") diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.ipynb deleted file mode 100644 index d759394..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.ipynb +++ /dev/null @@ -1,298 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import os\n", - "\n", - "import geopandas as gpd\n", - "import numpy as np\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "from general_functions import *" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "# HD" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Hollandse_Delta\"\n", - "data_path = \"../../Data_preprocessed/Waterschappen/Hollandse_Delta/Hollandse_Delta_aangepast.gpkg\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/Hollandse_Delta\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "HD = read_gpkg_layers(\n", - " gpkg_path=data_path,\n", - " variables=[\n", - " \"stuwen\",\n", - " \"gemalen\",\n", - " \"afsluitmiddelen\",\n", - " \"sluizen\",\n", - " \"HydroObjectWatergangtype\", # = hydroobject\n", - " \"HydroObjectKunstwerkvakken\",\n", - " ],\n", - ") # = duikersifonhevel\n", - "\n", - "# change names\n", - "HD[\"stuw\"] = HD.pop(\"stuwen\")\n", - "HD[\"gemaal\"] = HD.pop(\"gemalen\")\n", - "HD[\"afsluitmiddel\"] = 
HD.pop(\"afsluitmiddelen\")\n", - "HD[\"sluis\"] = HD.pop(\"sluizen\")\n", - "HD[\"hydroobject\"] = HD.pop(\"HydroObjectWatergangtype\")\n", - "HD[\"duikersifonhevel\"] = HD.pop(\"HydroObjectKunstwerkvakken\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# see the readme: peilgebieden have not be delivered properly. This data is retrieved from an external online database.\n", - "# peilgebieden_path = '../../Data_preprocessed/Waterschappen/Hollandse_Delta/WSHD_Peilgebieden_(Vigerend).shp'\n", - "peilgebieden_path_all = (\n", - " \"../../Data_preprocessed/Waterschappen/Hollandse_Delta/Nalvering_tweede/Shapefiles/PeilgebiedenPraktijk.shp\"\n", - ")\n", - "peilgebieden_path_geen_AP = (\n", - " \"../../Data_preprocessed/Waterschappen/Hollandse_Delta/Nalvering_tweede/Shapefiles/PeilgebiedenGeenAP.shp\"\n", - ")\n", - "\n", - "# load in both the old ones and the new ones, as the new ones do not have the peilgebiedafwijkingen, but they dont have the streefpeilen. 
These are stored in the old ones.\n", - "peilgebieden_all = gpd.read_file(peilgebieden_path_all)\n", - "peilgebieden_geen_AP = gpd.read_file(peilgebieden_path_geen_AP)\n", - "\n", - "peilgebieden = pd.merge(left=peilgebieden_all, right=peilgebieden_geen_AP, on=\"Dis\", suffixes=(\"_all\", \"\"))\n", - "\n", - "# add the shapefile to the dictionary\n", - "HD[\"peilgebied\"] = peilgebieden" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "### Adjust column names" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "# determine aanvoer en afvoer gemalen\n", - "HD[\"gemaal\"][\"func_aanvoer\"], HD[\"gemaal\"][\"func_afvoer\"], HD[\"gemaal\"][\"func_circulatie\"] = (\n", - " False,\n", - " False,\n", - " False,\n", - ") # default is False\n", - "HD[\"gemaal\"][\"functiegemaal\"] = HD[\"gemaal\"][\"FUNCTIEGEM\"].astype(str)\n", - "HD[\"gemaal\"].loc[HD[\"gemaal\"][\"functiegemaal\"] == \"Onbekend\", \"functiegemaal\"] = (\n", - " np.nan\n", - ") # replace onbekend with nan, will be filled up later see one line below\n", - "HD[\"gemaal\"][\"functiegemaal\"].fillna(\n", - " HD[\"gemaal\"][\"NAAM\"], inplace=True\n", - ") # some additional information may given in this column\n", - "HD[\"gemaal\"][\"functiegemaal\"] = HD[\"gemaal\"][\"functiegemaal\"].astype(str) # again, otherise issue True False\n", - "\n", - "HD[\"gemaal\"].loc[HD[\"gemaal\"].functiegemaal.str.contains(\"f-|fvoer|nderbemaling\"), \"func_afvoer\"] = True\n", - "HD[\"gemaal\"].loc[HD[\"gemaal\"].functiegemaal.str.contains(\"anvoergemaal|pmaling\"), \"func_aanvoer\"] = True\n", - "HD[\"gemaal\"].loc[HD[\"gemaal\"].functiegemaal.str.contains(\"Doorspoel\"), \"func_circulatie\"] = True\n", - "HD[\"gemaal\"].loc[\n", - " (HD[\"gemaal\"].func_afvoer is False)\n", - " & (HD[\"gemaal\"].func_aanvoer is False)\n", - " & (HD[\"gemaal\"].func_circulatie is False),\n", - " \"func_afvoer\",\n", - "] = True 
# set to afvoergemaal is there the function is unknown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# discard irrelevant data of HD HD, and create a uniform dataset compared to the other waterschappen\n", - "# Stuw\n", - "HD[\"stuw\"] = HD[\"stuw\"][[\"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "HD[\"stuw\"] = HD[\"stuw\"].rename(columns={\"CODE\": \"code\", \"GLOBALID\": \"globalid\"})\n", - "HD[\"stuw\"][\"nen3610id\"] = \"dummy_nen3610id_stuw_\" + HD[\"stuw\"].index.astype(str)\n", - "\n", - "# Gemaal\n", - "HD[\"gemaal\"] = HD[\"gemaal\"][[\"CODE\", \"GLOBALID\", \"func_afvoer\", \"func_aanvoer\", \"func_circulatie\", \"geometry\"]]\n", - "HD[\"gemaal\"] = HD[\"gemaal\"].rename(columns={\"GLOBALID\": \"globalid\", \"CODE\": \"code\"})\n", - "HD[\"gemaal\"][\"nen3610id\"] = \"dummy_nen3610id_gemaal_\" + HD[\"gemaal\"].index.astype(str)\n", - "\n", - "# Hydroobject\n", - "HD[\"hydroobject\"] = HD[\"hydroobject\"][[\"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "HD[\"hydroobject\"] = HD[\"hydroobject\"].rename(columns={\"GLOBALID\": \"globalid\", \"CODE\": \"code\"})\n", - "HD[\"hydroobject\"][\"nen3610id\"] = \"dummy_nen3610id_hydroobject_\" + HD[\"hydroobject\"].index.astype(str)\n", - "\n", - "# sluis\n", - "HD[\"sluis\"] = HD[\"sluis\"][[\"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "HD[\"sluis\"] = HD[\"sluis\"].rename(columns={\"GLOBALID\": \"globalid\"})\n", - "HD[\"sluis\"][\"nen3610id\"] = \"dummy_nen3610id_sluis_\" + HD[\"sluis\"].index.astype(str)\n", - "\n", - "# duikersifonhevel\n", - "HD[\"duikersifonhevel\"] = HD[\"duikersifonhevel\"][[\"CODE\", \"GLOBALID\", \"geometry\"]]\n", - "HD[\"duikersifonhevel\"] = HD[\"duikersifonhevel\"].rename(columns={\"CODE\": \"code\", \"GLOBALID\": \"globalid\"})\n", - "HD[\"duikersifonhevel\"][\"nen3610id\"] = \"dummy_nen3610id_duikersifonhevel_\" + HD[\"duikersifonhevel\"].index.astype(str)\n", - "\n", - "# afsluitmiddel\n", - 
"HD[\"afsluitmiddel\"] = HD[\"afsluitmiddel\"][[\"CODE\", \"geometry\"]]\n", - "HD[\"afsluitmiddel\"] = HD[\"afsluitmiddel\"].rename(columns={\"CODE\": \"code\"})\n", - "HD[\"afsluitmiddel\"][\"nen3610id\"] = \"dummy_nen3610id_afsluitmiddel_\" + HD[\"afsluitmiddel\"].index.astype(str)\n", - "HD[\"afsluitmiddel\"][\"globalid\"] = \"dummy_globalid_afsluitmiddel_\" + HD[\"afsluitmiddel\"].index.astype(str)\n", - "\n", - "# Peilgebied\n", - "HD[\"peilgebied\"] = HD[\"peilgebied\"][[\"REKENPEIL_\", \"NAAM_BEMAL\", \"geometry\"]]\n", - "HD[\"peilgebied\"][\"nen3610id\"] = \"dummy_nen3610id_peilgebied_\" + HD[\"peilgebied\"].index.astype(str)\n", - "HD[\"peilgebied\"][\"globalid\"] = \"dummy_globalid_peilgebied_\" + HD[\"peilgebied\"].index.astype(str)\n", - "HD[\"peilgebied\"] = HD[\"peilgebied\"].rename(columns={\"REKENPEIL_\": \"streefpeil\", \"NAAM_BEMAL\": \"code\"})\n", - "\n", - "# Streefpeil\n", - "HD[\"streefpeil\"] = pd.DataFrame()\n", - "HD[\"streefpeil\"][\"waterhoogte\"] = HD[\"peilgebied\"][\"streefpeil\"]\n", - "HD[\"streefpeil\"][\"globalid\"] = HD[\"peilgebied\"][\"globalid\"]\n", - "HD[\"streefpeil\"][\"geometry\"] = None\n", - "HD[\"streefpeil\"] = gpd.GeoDataFrame(HD[\"streefpeil\"], geometry=\"geometry\")\n", - "\n", - "# Remove the streefpeil from the peilgebied\n", - "HD[\"peilgebied\"] = HD[\"peilgebied\"][[\"code\", \"geometry\", \"nen3610id\", \"globalid\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# delete irrelvant data\n", - "variables = [\"peilgebiedpraktijk\"]\n", - "\n", - "for variable in variables:\n", - " if str(variable) in HD:\n", - " del HD[variable]" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=HD)" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "peilgebieden.plot()" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# Check if the directory exists\n", - "if not os.path.exists(output_gpkg_path):\n", - " # If it doesn't exist, create it\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=HD, output_gpkg_path=output_gpkg_path + \"/HD\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - "name": "routing" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py new file mode 100644 index 0000000..567ed70 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py @@ -0,0 +1,161 @@ +# import packages and functions +import os + +import geopandas as gpd +import numpy as np +import pandas as pd +from general_functions import * + +# # HD + + +# define relative paths +waterschap = "Hollandse_Delta" +data_path = "../../Data_preprocessed/Waterschappen/Hollandse_Delta/Hollandse_Delta_aangepast.gpkg" +output_gpkg_path = "../../Data_postprocessed/Waterschappen/Hollandse_Delta" + + +HD = read_gpkg_layers( + gpkg_path=data_path, + variables=[ + "stuwen", + "gemalen", + 
"afsluitmiddelen", + "sluizen", + "HydroObjectWatergangtype", # = hydroobject + "HydroObjectKunstwerkvakken", + ], +) # = duikersifonhevel + +# change names +HD["stuw"] = HD.pop("stuwen") +HD["gemaal"] = HD.pop("gemalen") +HD["afsluitmiddel"] = HD.pop("afsluitmiddelen") +HD["sluis"] = HD.pop("sluizen") +HD["hydroobject"] = HD.pop("HydroObjectWatergangtype") +HD["duikersifonhevel"] = HD.pop("HydroObjectKunstwerkvakken") + + +# see the readme: peilgebieden have not be delivered properly. This data is retrieved from an external online database. +# peilgebieden_path = '../../Data_preprocessed/Waterschappen/Hollandse_Delta/WSHD_Peilgebieden_(Vigerend).shp' +peilgebieden_path_all = ( + "../../Data_preprocessed/Waterschappen/Hollandse_Delta/Nalvering_tweede/Shapefiles/PeilgebiedenPraktijk.shp" +) +peilgebieden_path_geen_AP = ( + "../../Data_preprocessed/Waterschappen/Hollandse_Delta/Nalvering_tweede/Shapefiles/PeilgebiedenGeenAP.shp" +) + +# load in both the old ones and the new ones, as the new ones do not have the peilgebiedafwijkingen, but they dont have the streefpeilen. These are stored in the old ones. 
+peilgebieden_all = gpd.read_file(peilgebieden_path_all) +peilgebieden_geen_AP = gpd.read_file(peilgebieden_path_geen_AP) + +peilgebieden = pd.merge(left=peilgebieden_all, right=peilgebieden_geen_AP, on="Dis", suffixes=("_all", "")) + +# add the shapefile to the dictionary +HD["peilgebied"] = peilgebieden + + +# ### Adjust column names + + +# determine aanvoer en afvoer gemalen +HD["gemaal"]["func_aanvoer"], HD["gemaal"]["func_afvoer"], HD["gemaal"]["func_circulatie"] = ( + False, + False, + False, +) # default is False +HD["gemaal"]["functiegemaal"] = HD["gemaal"]["FUNCTIEGEM"].astype(str) +HD["gemaal"].loc[HD["gemaal"]["functiegemaal"] == "Onbekend", "functiegemaal"] = ( + np.nan +) # replace onbekend with nan, will be filled up later see one line below +HD["gemaal"]["functiegemaal"].fillna( + HD["gemaal"]["NAAM"], inplace=True +) # some additional information may given in this column +HD["gemaal"]["functiegemaal"] = HD["gemaal"]["functiegemaal"].astype(str) # again, otherise issue True False + +HD["gemaal"].loc[HD["gemaal"].functiegemaal.str.contains("f-|fvoer|nderbemaling"), "func_afvoer"] = True +HD["gemaal"].loc[HD["gemaal"].functiegemaal.str.contains("anvoergemaal|pmaling"), "func_aanvoer"] = True +HD["gemaal"].loc[HD["gemaal"].functiegemaal.str.contains("Doorspoel"), "func_circulatie"] = True +HD["gemaal"].loc[ + (HD["gemaal"].func_afvoer is False) + & (HD["gemaal"].func_aanvoer is False) + & (HD["gemaal"].func_circulatie is False), + "func_afvoer", +] = True # set to afvoergemaal is there the function is unknown + + +# discard irrelevant data of HD HD, and create a uniform dataset compared to the other waterschappen +# Stuw +HD["stuw"] = HD["stuw"][["CODE", "GLOBALID", "geometry"]] +HD["stuw"] = HD["stuw"].rename(columns={"CODE": "code", "GLOBALID": "globalid"}) +HD["stuw"]["nen3610id"] = "dummy_nen3610id_stuw_" + HD["stuw"].index.astype(str) + +# Gemaal +HD["gemaal"] = HD["gemaal"][["CODE", "GLOBALID", "func_afvoer", "func_aanvoer", "func_circulatie", 
"geometry"]] +HD["gemaal"] = HD["gemaal"].rename(columns={"GLOBALID": "globalid", "CODE": "code"}) +HD["gemaal"]["nen3610id"] = "dummy_nen3610id_gemaal_" + HD["gemaal"].index.astype(str) + +# Hydroobject +HD["hydroobject"] = HD["hydroobject"][["CODE", "GLOBALID", "geometry"]] +HD["hydroobject"] = HD["hydroobject"].rename(columns={"GLOBALID": "globalid", "CODE": "code"}) +HD["hydroobject"]["nen3610id"] = "dummy_nen3610id_hydroobject_" + HD["hydroobject"].index.astype(str) + +# sluis +HD["sluis"] = HD["sluis"][["CODE", "GLOBALID", "geometry"]] +HD["sluis"] = HD["sluis"].rename(columns={"GLOBALID": "globalid"}) +HD["sluis"]["nen3610id"] = "dummy_nen3610id_sluis_" + HD["sluis"].index.astype(str) + +# duikersifonhevel +HD["duikersifonhevel"] = HD["duikersifonhevel"][["CODE", "GLOBALID", "geometry"]] +HD["duikersifonhevel"] = HD["duikersifonhevel"].rename(columns={"CODE": "code", "GLOBALID": "globalid"}) +HD["duikersifonhevel"]["nen3610id"] = "dummy_nen3610id_duikersifonhevel_" + HD["duikersifonhevel"].index.astype(str) + +# afsluitmiddel +HD["afsluitmiddel"] = HD["afsluitmiddel"][["CODE", "geometry"]] +HD["afsluitmiddel"] = HD["afsluitmiddel"].rename(columns={"CODE": "code"}) +HD["afsluitmiddel"]["nen3610id"] = "dummy_nen3610id_afsluitmiddel_" + HD["afsluitmiddel"].index.astype(str) +HD["afsluitmiddel"]["globalid"] = "dummy_globalid_afsluitmiddel_" + HD["afsluitmiddel"].index.astype(str) + +# Peilgebied +HD["peilgebied"] = HD["peilgebied"][["REKENPEIL_", "NAAM_BEMAL", "geometry"]] +HD["peilgebied"]["nen3610id"] = "dummy_nen3610id_peilgebied_" + HD["peilgebied"].index.astype(str) +HD["peilgebied"]["globalid"] = "dummy_globalid_peilgebied_" + HD["peilgebied"].index.astype(str) +HD["peilgebied"] = HD["peilgebied"].rename(columns={"REKENPEIL_": "streefpeil", "NAAM_BEMAL": "code"}) + +# Streefpeil +HD["streefpeil"] = pd.DataFrame() +HD["streefpeil"]["waterhoogte"] = HD["peilgebied"]["streefpeil"] +HD["streefpeil"]["globalid"] = HD["peilgebied"]["globalid"] 
+HD["streefpeil"]["geometry"] = None +HD["streefpeil"] = gpd.GeoDataFrame(HD["streefpeil"], geometry="geometry") + +# Remove the streefpeil from the peilgebied +HD["peilgebied"] = HD["peilgebied"][["code", "geometry", "nen3610id", "globalid"]] + + +# delete irrelvant data +variables = ["peilgebiedpraktijk"] + +for variable in variables: + if str(variable) in HD: + del HD[variable] + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=HD) + + +peilgebieden.plot() + + +# ### Store data + + +# Check if the directory exists +if not os.path.exists(output_gpkg_path): + # If it doesn't exist, create it + os.makedirs(output_gpkg_path) + +store_data(waterschap=HD, output_gpkg_path=output_gpkg_path + "/HD") diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.ipynb deleted file mode 100644 index 67cc146..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.ipynb +++ /dev/null @@ -1,481 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import os\n", - "\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "from general_functions import *" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "pd.set_option(\"display.max_columns\", None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Rijnland\"\n", - "path_Rijnland = 
\"..\\..\\Data_preprocessed\\Waterschappen\\Rijnland\\DataRijnland\\DataRijnland.gpkg\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/Rijnland\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "Rijnland = read_gpkg_layers(\n", - " gpkg_path=path_Rijnland,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " \"afsluitmiddel\",\n", - " \"duikersifonhevel\",\n", - " \"hydroobject\",\n", - " \"peilgebiedvigerend\",\n", - " \"peilgebiedpraktijk\",\n", - " \"peilafwijkinggebied\",\n", - " \"streefpeil\",\n", - " ],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "# Rijnland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "fig, ax = plt.subplots()\n", - "Rijnland[\"peilgebiedvigerend\"].plot(ax=ax, color=\"blue\")\n", - "Rijnland[\"peilgebiedpraktijk\"].plot(ax=ax, color=\"orange\")\n", - "Rijnland[\"peilafwijkinggebied\"].plot(ax=ax, color=\"green\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "Rijnland[\"stuw\"] = Rijnland[\"stuw\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"gemaal\"] = Rijnland[\"gemaal\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"afsluitmiddel\"] = Rijnland[\"afsluitmiddel\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"hydroobject\"] = Rijnland[\"hydroobject\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"duikersifonhevel\"] = Rijnland[\"duikersifonhevel\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"peilgebiedpraktijk\"] = Rijnland[\"peilgebiedpraktijk\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"peilafwijkinggebied\"] = 
Rijnland[\"peilafwijkinggebied\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"peilgebiedvigerend\"] = Rijnland[\"peilgebiedvigerend\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# fix geometries\n", - "Rijnland[\"peilgebiedvigerend\"][\"geometry\"] = Rijnland[\"peilgebiedvigerend\"].buffer(distance=0)\n", - "Rijnland[\"peilgebiedpraktijk\"][\"geometry\"] = Rijnland[\"peilgebiedpraktijk\"].buffer(distance=0)\n", - "Rijnland[\"peilafwijkinggebied\"][\"geometry\"] = Rijnland[\"peilafwijkinggebied\"].buffer(distance=0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# peilgebied = burn_in_peilgebieden(base_layer = Rijnland['peilgebiedpraktijk'],\n", - "# overlay_layer = Rijnland['peilafwijkinggebied'],\n", - "# plot = True)\n", - "# Rijnland['peilgebied'] = gpd.GeoDataFrame(peilgebied)\n", - "\n", - "peilgebied = burn_in_peilgebieden(\n", - " base_layer=Rijnland[\"peilgebiedvigerend\"], overlay_layer=Rijnland[\"peilgebiedpraktijk\"], plot=True\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "peilgebied = gpd.GeoDataFrame(peilgebied)\n", - "peilgebied = peilgebied[peilgebied.geometry.type.isin([\"Polygon\", \"MultiPolygon\"])] # only select polygons\n", - "\n", - "Rijnland[\"peilgebied\"] = gpd.GeoDataFrame(peilgebied)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "gpd.GeoDataFrame(peilgebied).to_file(\"Rijnland_test_kan_weg.shp\")\n", - "# peilgebied" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# # Explode the multipolygons into separate parts\n", - "# 
exploded_peilgebied = Rijnland['peilgebied'].explode('geometry')\n", - "\n", - "# # Check if each part is a single polygon\n", - "# is_simple_polygon = exploded_peilgebied['geometry'].apply(lambda geom: geom.type == 'Polygon')\n", - "\n", - "# # Select only the simple polygons from the exploded GeoDataFrame\n", - "# simple_peilgebied = exploded_peilgebied[is_simple_polygon]\n", - "# simple_peilgebied" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# #convert multi polygon to single polygon\n", - "# Rijnland['peilgebied'] = Rijnland['peilgebied'].explode()\n", - "# Rijnland['peilgebied']['nen3610id'] = 'dummy_nen3610id_duikersifonhevel_' + Rijnland['peilgebied'].index.astype(str)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "Rijnland[\"streefpeil\"].peilgebiedpraktijkid.fillna(value=Rijnland[\"streefpeil\"][\"peilgebiedvigerendid\"], inplace=True)\n", - "# Rijnland['streefpeil'].drop_duplicates(subset=['peilgebiedpraktijkid'], inplace=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# get rid of irrelevant streefpeilen, which otherwise results in too many overlapped peilgebieden\n", - "filter_condition = Rijnland[\"streefpeil\"][\"soortstreefpeil\"].isin(\n", - " [\"omer\", \"ondergrens\"]\n", - ") #'omer' for all rows where something of zomer, Zomer, dynamische zomer, etc, is used\n", - "kept_rows = Rijnland[\"streefpeil\"][filter_condition]\n", - "\n", - "other_rows = Rijnland[\"streefpeil\"][~filter_condition].drop_duplicates(subset=[\"peilgebiedpraktijkid\"])\n", - "Rijnland[\"streefpeil\"] = pd.concat([kept_rows, other_rows])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "# 
Rijnland['streefpeil'].peilafwijkinggebiedid.fillna(value=Rijnland['streefpeil']['peilgebiedpraktijkid'], inplace=True)\n", - "# Rijnland['streefpeil'].peilgebiedpraktijkid.fillna(value=Rijnland['streefpeil']['peilgebiedvigerendid'], inplace=True)\n", - "\n", - "pg_sp = pd.merge(\n", - " left=peilgebied,\n", - " right=Rijnland[\"streefpeil\"],\n", - " left_on=\"globalid\",\n", - " right_on=\"peilgebiedpraktijkid\",\n", - " suffixes=(\"\", \"_streefpeil\"),\n", - ")\n", - "\n", - "pg_sp[\"geometry\"] = gpd.GeoDataFrame(geometry=pg_sp[\"geometry\"]).reset_index(drop=True)\n", - "# pg_sp = pg_sp.explode('geometry',ignore_index=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# gpd.GeoDataFrame(pg_sp.loc[pg_sp.code != 'PBS_WW-25AS'], geometry='geometry').set_crs('EPSG:28992').to_file('peilgebieden_Rijnland.gpkg', driver='GPKG')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# there are duplicate codes, nen3610ids and globalids due to the exploded function. 
Rename these.\n", - "pg_sp[\"nen3610id\"] = \"dummy_nen3610id_peilgebied_\" + pg_sp.index.astype(str)\n", - "pg_sp[\"code\"] = \"dummy_code_peilgebied_\" + pg_sp.index.astype(str)\n", - "pg_sp[\"globalid\"] = \"dummy_globalid_peilgebied_\" + pg_sp.index.astype(str)\n", - "\n", - "Rijnland[\"peilgebied\"] = pg_sp\n", - "Rijnland[\"peilgebied\"] = Rijnland[\"peilgebied\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Rijnland[\"peilgebied\"] = gpd.GeoDataFrame(Rijnland[\"peilgebied\"], geometry=\"geometry\")\n", - "Rijnland[\"peilgebied\"] = Rijnland[\"peilgebied\"].set_crs(\"EPSG:28992\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "streefpeil = pg_sp[[\"waterhoogte\", \"globalid\"]]\n", - "\n", - "streefpeil[\"geometry\"] = np.nan\n", - "streefpeil = gpd.GeoDataFrame(streefpeil)\n", - "Rijnland[\"streefpeil\"] = streefpeil" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# delete irrelvant data\n", - "variables = [\"peilgebiedpraktijk\", \"peilgebiedvigerend\", \"peilafwijkinggebied\"]\n", - "\n", - "for variable in variables:\n", - " if str(variable) in Rijnland:\n", - " del Rijnland[variable]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "Rijnland[\"peilgebied\"].plot()" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=Rijnland)" - ] - }, - { - "cell_type": "markdown", - "id": "24", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [] - 
}, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "# Check if the directory exists\n", - "if not os.path.exists(output_gpkg_path):\n", - " # If it doesn't exist, create it\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=Rijnland, output_gpkg_path=output_gpkg_path + \"/Rijnland\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - "name": "routing" - }, - 
"language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py new file mode 100644 index 0000000..ef8b895 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py @@ -0,0 +1,171 @@ +# import packages and functions +import os + +import geopandas as gpd +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +from general_functions import * + +pd.set_option("display.max_columns", None) + + +# define relative paths +waterschap = "Rijnland" +path_Rijnland = "..\..\Data_preprocessed\Waterschappen\Rijnland\DataRijnland\DataRijnland.gpkg" +output_gpkg_path = "../../Data_postprocessed/Waterschappen/Rijnland" + + +Rijnland = read_gpkg_layers( + gpkg_path=path_Rijnland, + variables=[ + "stuw", + "gemaal", + "afsluitmiddel", + "duikersifonhevel", + "hydroobject", + "peilgebiedvigerend", + "peilgebiedpraktijk", + "peilafwijkinggebied", + "streefpeil", + ], +) + + +# # Rijnland + + +fig, ax = plt.subplots() +Rijnland["peilgebiedvigerend"].plot(ax=ax, color="blue") +Rijnland["peilgebiedpraktijk"].plot(ax=ax, color="orange") +Rijnland["peilafwijkinggebied"].plot(ax=ax, color="green") + + +Rijnland["stuw"] = Rijnland["stuw"][["code", "nen3610id", "globalid", "geometry"]] +Rijnland["gemaal"] = Rijnland["gemaal"][["code", "nen3610id", "globalid", "geometry"]] +Rijnland["afsluitmiddel"] = Rijnland["afsluitmiddel"][["code", "nen3610id", "globalid", "geometry"]] +Rijnland["hydroobject"] = Rijnland["hydroobject"][["code", "nen3610id", "globalid", "geometry"]] +Rijnland["duikersifonhevel"] = Rijnland["duikersifonhevel"][["code", "nen3610id", 
"globalid", "geometry"]] +Rijnland["peilgebiedpraktijk"] = Rijnland["peilgebiedpraktijk"][["code", "nen3610id", "globalid", "geometry"]] +Rijnland["peilafwijkinggebied"] = Rijnland["peilafwijkinggebied"][["code", "nen3610id", "globalid", "geometry"]] +Rijnland["peilgebiedvigerend"] = Rijnland["peilgebiedvigerend"][["code", "nen3610id", "globalid", "geometry"]] + + +# fix geometries +Rijnland["peilgebiedvigerend"]["geometry"] = Rijnland["peilgebiedvigerend"].buffer(distance=0) +Rijnland["peilgebiedpraktijk"]["geometry"] = Rijnland["peilgebiedpraktijk"].buffer(distance=0) +Rijnland["peilafwijkinggebied"]["geometry"] = Rijnland["peilafwijkinggebied"].buffer(distance=0) + + +# peilgebied = burn_in_peilgebieden(base_layer = Rijnland['peilgebiedpraktijk'], +# overlay_layer = Rijnland['peilafwijkinggebied'], +# plot = True) +# Rijnland['peilgebied'] = gpd.GeoDataFrame(peilgebied) + +peilgebied = burn_in_peilgebieden( + base_layer=Rijnland["peilgebiedvigerend"], overlay_layer=Rijnland["peilgebiedpraktijk"], plot=True +) + + +peilgebied = gpd.GeoDataFrame(peilgebied) +peilgebied = peilgebied[peilgebied.geometry.type.isin(["Polygon", "MultiPolygon"])] # only select polygons + +Rijnland["peilgebied"] = gpd.GeoDataFrame(peilgebied) + + +gpd.GeoDataFrame(peilgebied).to_file("Rijnland_test_kan_weg.shp") +# peilgebied + + +# # Explode the multipolygons into separate parts +# exploded_peilgebied = Rijnland['peilgebied'].explode('geometry') + +# # Check if each part is a single polygon +# is_simple_polygon = exploded_peilgebied['geometry'].apply(lambda geom: geom.type == 'Polygon') + +# # Select only the simple polygons from the exploded GeoDataFrame +# simple_peilgebied = exploded_peilgebied[is_simple_polygon] +# simple_peilgebied + + +# #convert multi polygon to single polygon +# Rijnland['peilgebied'] = Rijnland['peilgebied'].explode() +# Rijnland['peilgebied']['nen3610id'] = 'dummy_nen3610id_duikersifonhevel_' + Rijnland['peilgebied'].index.astype(str) + + 
+Rijnland["streefpeil"].peilgebiedpraktijkid.fillna(value=Rijnland["streefpeil"]["peilgebiedvigerendid"], inplace=True) +# Rijnland['streefpeil'].drop_duplicates(subset=['peilgebiedpraktijkid'], inplace=True) + + +# get rid of irrelevant streefpeilen, which otherwise results in too many overlapped peilgebieden +filter_condition = Rijnland["streefpeil"]["soortstreefpeil"].isin( + ["omer", "ondergrens"] +) #'omer' for all rows where something of zomer, Zomer, dynamische zomer, etc, is used +kept_rows = Rijnland["streefpeil"][filter_condition] + +other_rows = Rijnland["streefpeil"][~filter_condition].drop_duplicates(subset=["peilgebiedpraktijkid"]) +Rijnland["streefpeil"] = pd.concat([kept_rows, other_rows]) + + +# Rijnland['streefpeil'].peilafwijkinggebiedid.fillna(value=Rijnland['streefpeil']['peilgebiedpraktijkid'], inplace=True) +# Rijnland['streefpeil'].peilgebiedpraktijkid.fillna(value=Rijnland['streefpeil']['peilgebiedvigerendid'], inplace=True) + +pg_sp = pd.merge( + left=peilgebied, + right=Rijnland["streefpeil"], + left_on="globalid", + right_on="peilgebiedpraktijkid", + suffixes=("", "_streefpeil"), +) + +pg_sp["geometry"] = gpd.GeoDataFrame(geometry=pg_sp["geometry"]).reset_index(drop=True) +# pg_sp = pg_sp.explode('geometry',ignore_index=True) + + +# gpd.GeoDataFrame(pg_sp.loc[pg_sp.code != 'PBS_WW-25AS'], geometry='geometry').set_crs('EPSG:28992').to_file('peilgebieden_Rijnland.gpkg', driver='GPKG') + + +# there are duplicate codes, nen3610ids and globalids due to the exploded function. Rename these. 
+pg_sp["nen3610id"] = "dummy_nen3610id_peilgebied_" + pg_sp.index.astype(str) +pg_sp["code"] = "dummy_code_peilgebied_" + pg_sp.index.astype(str) +pg_sp["globalid"] = "dummy_globalid_peilgebied_" + pg_sp.index.astype(str) + +Rijnland["peilgebied"] = pg_sp +Rijnland["peilgebied"] = Rijnland["peilgebied"][["code", "nen3610id", "globalid", "geometry"]] +Rijnland["peilgebied"] = gpd.GeoDataFrame(Rijnland["peilgebied"], geometry="geometry") +Rijnland["peilgebied"] = Rijnland["peilgebied"].set_crs("EPSG:28992") + + +streefpeil = pg_sp[["waterhoogte", "globalid"]] + +streefpeil["geometry"] = np.nan +streefpeil = gpd.GeoDataFrame(streefpeil) +Rijnland["streefpeil"] = streefpeil + + +# delete irrelvant data +variables = ["peilgebiedpraktijk", "peilgebiedvigerend", "peilafwijkinggebied"] + +for variable in variables: + if str(variable) in Rijnland: + del Rijnland[variable] + + +Rijnland["peilgebied"].plot() + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=Rijnland) + + +# ### Store data + + +# Check if the directory exists +if not os.path.exists(output_gpkg_path): + # If it doesn't exist, create it + os.makedirs(output_gpkg_path) + +store_data(waterschap=Rijnland, output_gpkg_path=output_gpkg_path + "/Rijnland") diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.ipynb deleted file mode 100644 index 1aedd84..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.ipynb +++ /dev/null @@ -1,461 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import os\n", - "\n", - "import geopandas as gpd\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - 
"metadata": {}, - "outputs": [], - "source": [ - "from general_functions import *" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "pd.set_option(\"display.max_columns\", None)" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "# WSRL" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"WSRL\"\n", - "data_path = \"../../Data_preprocessed/Waterschappen/WSRL\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/WSRL\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# WSRL has delivered the data per catchment. Loop through each catchment, and concat all data to a single dictionary\n", - "WSRL = {}\n", - "\n", - "for root, dirs, files in os.walk(data_path):\n", - " for file in files:\n", - " if file.endswith(\".gpkg\"):\n", - " gpkg_path = os.path.join(root, file)\n", - "\n", - " if WSRL == {}:\n", - " WSRL = read_gpkg_layers(\n", - " gpkg_path=gpkg_path, variables=[\"Stuw\", \"Gemaal\", \"Hydroobject\", \"Duikersifonhevel\"]\n", - " )\n", - " else:\n", - " temp_WSRL = read_gpkg_layers(\n", - " gpkg_path=gpkg_path, variables=[\"Stuw\", \"Gemaal\", \"Hydroobject\", \"Duikersifonhevel\"]\n", - " )\n", - " for variable in WSRL.keys():\n", - " WSRL[variable] = pd.concat([WSRL[variable], temp_WSRL[variable]]).reset_index(drop=True)" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "Additional data is given in another gpkg, which includes the peilgebieden" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "gdb_path = r\"..\\..\\Data_preprocessed\\Waterschappen\\WSRL\\OverigeGegevens.gdb\"\n", - "WSRL_gdb = 
read_gpkg_layers(gpkg_path=gdb_path, variables=[\"PeilgebiedenPraktijk\"])\n", - "\n", - "# add the gdb to the dict\n", - "# WSRL['peilgebiedafwijking'] = WSRL_gdb['Peilafwijkingen']\n", - "WSRL[\"peilgebiedpraktijk\"] = WSRL_gdb[\"PeilgebiedenPraktijk\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# change names\n", - "WSRL[\"stuw\"] = WSRL.pop(\"Stuw\")\n", - "WSRL[\"gemaal\"] = WSRL.pop(\"Gemaal\")\n", - "WSRL[\"hydroobject\"] = WSRL.pop(\"Hydroobject\")\n", - "WSRL[\"duikersifonhevel\"] = WSRL.pop(\"Duikersifonhevel\")" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": {}, - "source": [ - "### Adjust column names" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# add gemaal information\n", - "WSRL[\"peilgebiedpraktijk\"][\"waterhoogte\"] = np.nan\n", - "WSRL[\"peilgebiedpraktijk\"].loc[\n", - " WSRL[\"peilgebiedpraktijk\"][\"TYPE_PEILBEHEER\"] == \"seizoensgebonden\", \"waterhoogte\"\n", - "].fillna(WSRL[\"peilgebiedpraktijk\"][\"ZOMERPEIL\"], inplace=True)\n", - "WSRL[\"peilgebiedpraktijk\"].loc[WSRL[\"peilgebiedpraktijk\"][\"TYPE_PEILBEHEER\"] == \"flexibel\", \"waterhoogte\"].fillna(\n", - " WSRL[\"peilgebiedpraktijk\"][\"ZOMERPEIL\"], inplace=True\n", - ")\n", - "WSRL[\"peilgebiedpraktijk\"].loc[WSRL[\"peilgebiedpraktijk\"][\"TYPE_PEILBEHEER\"] == \"natuurlijk\", \"waterhoogte\"].fillna(\n", - " WSRL[\"peilgebiedpraktijk\"][\"ZOMERPEIL\"], inplace=True\n", - ")\n", - "WSRL[\"peilgebiedpraktijk\"].loc[WSRL[\"peilgebiedpraktijk\"][\"TYPE_PEILBEHEER\"] == \"vast\", \"waterhoogte\"].fillna(\n", - " WSRL[\"peilgebiedpraktijk\"][\"VASTPEIL\"], inplace=True\n", - ")\n", - "WSRL[\"peilgebiedpraktijk\"].loc[WSRL[\"peilgebiedpraktijk\"][\"TYPE_PEILBEHEER\"] == \"streef\", \"waterhoogte\"].fillna(\n", - " WSRL[\"peilgebiedpraktijk\"][\"STREEFPEIL\"], inplace=True\n", - ")\n", - "\n", - "# 
niet elke kolom is ingevuld met de paren die je zou verwachten. Vul voor de ontbrekende waarden de volgende waarden in:\n", - "WSRL[\"peilgebiedpraktijk\"].loc[:, \"waterhoogte\"].fillna(WSRL[\"peilgebiedpraktijk\"][\"STREEFPEIL\"], inplace=True)\n", - "WSRL[\"peilgebiedpraktijk\"].loc[:, \"waterhoogte\"].fillna(WSRL[\"peilgebiedpraktijk\"][\"VASTPEIL\"], inplace=True)\n", - "WSRL[\"peilgebiedpraktijk\"].loc[:, \"waterhoogte\"].fillna(WSRL[\"peilgebiedpraktijk\"][\"ZOMERPEIL\"], inplace=True)\n", - "WSRL[\"peilgebiedpraktijk\"].loc[:, \"waterhoogte\"].fillna(WSRL[\"peilgebiedpraktijk\"][\"MIN_PEIL\"], inplace=True)\n", - "WSRL[\"peilgebiedpraktijk\"].loc[:, \"waterhoogte\"].fillna(WSRL[\"peilgebiedpraktijk\"][\"WINTERPEIL\"], inplace=True)\n", - "WSRL[\"peilgebiedpraktijk\"].loc[:, \"waterhoogte\"].fillna(WSRL[\"peilgebiedpraktijk\"][\"MAX_PEIL\"], inplace=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# Add gemaal information\n", - "# determine aanvoer en afvoer gemalen\n", - "WSRL[\"gemaal\"][\"func_aanvoer\"], WSRL[\"gemaal\"][\"func_afvoer\"], WSRL[\"gemaal\"][\"func_circulatie\"] = (\n", - " False,\n", - " False,\n", - " False,\n", - ") # default is False\n", - "WSRL[\"gemaal\"][\"functiegemaal\"] = WSRL[\"gemaal\"][\"naam\"].astype(str)\n", - "\n", - "\n", - "WSRL[\"gemaal\"].loc[WSRL[\"gemaal\"].functiegemaal.str.contains(\"Afvoer|Onbekend|Af-|Onderbemaling\"), \"func_afvoer\"] = True\n", - "WSRL[\"gemaal\"].loc[WSRL[\"gemaal\"].functiegemaal.str.contains(\"Aanvoer|Opmaling\"), \"func_aanvoer\"] = True\n", - "WSRL[\"gemaal\"].loc[WSRL[\"gemaal\"].functiegemaal.str.contains(\"Doorspoelgemaal\"), \"func_circulatie\"] = True\n", - "WSRL[\"gemaal\"].loc[\n", - " (WSRL[\"gemaal\"].func_afvoer is False)\n", - " & (WSRL[\"gemaal\"].func_aanvoer is False)\n", - " & (WSRL[\"gemaal\"].func_circulatie is False),\n", - " \"func_afvoer\",\n", - "] = True # set to afvoergemaal is 
there the function is unknown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# discard irrelevant data of WSRL, and create a uniform dataset compared to the other waterschappen\n", - "WSRL[\"stuw\"] = WSRL[\"stuw\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "WSRL[\"gemaal\"] = WSRL[\"gemaal\"][\n", - " [\"code\", \"globalid\", \"nen3610id\", \"func_afvoer\", \"func_aanvoer\", \"func_circulatie\", \"geometry\"]\n", - "]\n", - "WSRL[\"hydroobject\"] = WSRL[\"hydroobject\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "WSRL[\"duikersifonhevel\"] = WSRL[\"duikersifonhevel\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# gpd.overlay(WSRL['peilgebiedpraktijk'], WSRL['peilgebiedafwijking'], how='union', keep_geom_type=False).to_file('WSRL_kan_weg.gpkg')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "fig, ax = plt.subplots()\n", - "WSRL[\"peilgebiedpraktijk\"].plot(ax=ax, color=\"cornflowerblue\", label=\"Peilgebiedpraktijk\")\n", - "# WSRL['peilgebiedafwijking'].plot(ax=ax, color='blue', label = 'Peilgebiedafwijking')\n", - "ax.legend()\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "def intersect_using_spatial_index(peilgebied_praktijk, peilgebied_afwijking, check):\n", - " \"\"\"\n", - " Conduct spatial intersection using spatial index for candidates GeoDataFrame to make queries faster.\n", - "\n", - " Note, with this function, you can have multiple Polygons in the 'intersecting_gdf' and it will return all the points\n", - " intersect with ANY of those geometries.\n", - " \"\"\"\n", - " peilgebied_praktijk_sindex = 
peilgebied_praktijk.sindex\n", - " possible_matches_index = []\n", - "\n", - " # 'itertuples()' function is a faster version of 'iterrows()'\n", - " for other in peilgebied_afwijking.itertuples():\n", - " bounds = other.geometry.bounds\n", - " c = list(peilgebied_praktijk_sindex.intersection(bounds))\n", - " possible_matches_index += c\n", - "\n", - " # Get unique candidates\n", - " unique_candidate_matches = list(set(possible_matches_index))\n", - " possible_matches = peilgebied_praktijk.iloc[unique_candidate_matches]\n", - "\n", - " # Conduct the actual intersect\n", - " overlapping_pg_praktijk = possible_matches.loc[\n", - " possible_matches.intersects(peilgebied_afwijking.unary_union)\n", - " ] # the entire peilgebied praktijk polygons\n", - "\n", - " # remove the peilgebied afwijking from the peilgebied praktijk\n", - " intersection = gpd.overlay(overlapping_pg_praktijk, peilgebied_afwijking, how=\"intersection\")\n", - " overlapping_updated = gpd.overlay(\n", - " peilgebied_praktijk, intersection, how=\"symmetric_difference\"\n", - " ) # check of het difference of symmetrical difference moet zijn!#remove the difference between pg_praktijk and pg_afwijking\n", - " peilgebied = overlapping_updated.append(\n", - " intersection, ignore_index=True\n", - " ) # add the removed difference, but now only the intersected part of pg_afwijking\n", - "\n", - " if check:\n", - " peilgebied_praktijk.to_file(\"Checks/Rivierenland/peilgebied_praktijk.gpkg\", driver=\"GPKG\")\n", - " peilgebied_afwijking.to_file(\"Checks/Rivierenland/peilgebied_afwijking.gpkg\", driver=\"GPKG\")\n", - "\n", - " intersection.to_file(\"Checks/Rivierenland/intersection.gpkg\", driver=\"GPKG\")\n", - " overlapping_updated.to_file(\"Checks/Rivierenland/overlapping_updated.gpkg\", driver=\"GPKG\")\n", - " peilgebied.to_file(\"Checks/Rivierenland/peilgebied.gpkg\", driver=\"GPKG\")\n", - "\n", - " return peilgebied" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - 
"metadata": {}, - "outputs": [], - "source": [ - "# peilgebied = intersect_using_spatial_index(peilgebied_praktijk = WSRL['peilgebiedpraktijk'],\n", - "# peilgebied_afwijking = WSRL['peilgebiedafwijking'],\n", - "# check = False)\n", - "peilgebied = WSRL[\"peilgebiedpraktijk\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# WSRL['peilgebiedpraktijk'] = WSRL['peilgebiedpraktijk'][['ZOMERPEIL', 'WINTERPEIL', 'MIN_PEIL', 'MAX_PEIL', 'STREEFPEIL', 'VASTPEIL', 'TYPE_PEILBEHEER', 'geometry']]\n", - "# peilgebied['waterhoogte'] = np.nan\n", - "\n", - "# for fill_column in ['ZOMERPEIL', 'STREEFPEIL', 'VASTPEIL', 'MAX_PEIL', 'MIN_PEIL', 'WINTERPEIL']:\n", - "# peilgebied.waterhoogte.fillna(value = peilgebied[fill_column], inplace=True)\n", - "\n", - "# WSRL['peilgebiedpraktijk'].waterhoogte.fillna(value=WSRL['peilgebiedpraktijk'][['ZOMERPEIL', 'STREEFPEIL', 'VASTPEIL', 'MAX_PEIL', 'MIN_PEIL']])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# store the ontbrekende_streefpeilen to a gpkg, check validity\n", - "ontbrekende_streefpeilen = peilgebied[peilgebied.waterhoogte.isna()]\n", - "# ontbrekende_streefpeilen.to_file('Checks/Rivierenland/peilgebieden_zonder_streefpeil.gpkg', driver='GPKG')\n", - "peilgebied.plot()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "peilgebied[\"CODE_0\"] = peilgebied.CODE # change names for later purposes\n", - "peilgebied.CODE = np.nan\n", - "\n", - "# add and fill the final columns\n", - "columns_to_check = [\"CODE_0\"]\n", - "\n", - "for fill_column in [\"CODE_0\"]:\n", - " peilgebied.CODE.fillna(value=peilgebied[fill_column], inplace=True)\n", - "\n", - "peilgebied[\"globalid\"] = peilgebied.index.astype(str)\n", - "peilgebied[\"nen3610id\"] = \"dummy_nen3610id_peilgebied_\" + 
peilgebied.index.astype(str)\n", - "\n", - "# create streefpeil key\n", - "WSRL[\"streefpeil\"] = peilgebied[[\"waterhoogte\", \"globalid\"]]\n", - "WSRL[\"streefpeil\"][\"geometry\"] = np.nan\n", - "WSRL[\"streefpeil\"] = gpd.GeoDataFrame(WSRL[\"streefpeil\"])\n", - "\n", - "# only select the relevant columns\n", - "peilgebied = peilgebied[[\"CODE\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "WSRL[\"peilgebied\"] = peilgebied # add to the dict" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# discard irrelvant data\n", - "variables = [\"peilgebiedpraktijk\", \"peilgebiedafwijking\"]\n", - "\n", - "for variable in variables:\n", - " if str(variable) in WSRL:\n", - " del WSRL[variable]" - ] - }, - { - "cell_type": "markdown", - "id": "21", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=WSRL)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "24", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "# Check if the directory exists\n", - "if not os.path.exists(output_gpkg_path):\n", - " # If it doesn't exist, create it\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=WSRL, output_gpkg_path=output_gpkg_path + \"/WSRL\")" - ] - }, - { - "cell_type": "raw", - "id": "26", - "metadata": {}, - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - 
"name": "routing" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py new file mode 100644 index 0000000..b7eb5a2 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py @@ -0,0 +1,240 @@ +# import packages and functions +import os + +import geopandas as gpd +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +from general_functions import * + +pd.set_option("display.max_columns", None) + + +# # WSRL + + +# define relative paths +waterschap = "WSRL" +data_path = "../../Data_preprocessed/Waterschappen/WSRL" +output_gpkg_path = "../../Data_postprocessed/Waterschappen/WSRL" + + +# WSRL has delivered the data per catchment. 
Loop through each catchment, and concat all data to a single dictionary +WSRL = {} + +for root, dirs, files in os.walk(data_path): + for file in files: + if file.endswith(".gpkg"): + gpkg_path = os.path.join(root, file) + + if WSRL == {}: + WSRL = read_gpkg_layers( + gpkg_path=gpkg_path, variables=["Stuw", "Gemaal", "Hydroobject", "Duikersifonhevel"] + ) + else: + temp_WSRL = read_gpkg_layers( + gpkg_path=gpkg_path, variables=["Stuw", "Gemaal", "Hydroobject", "Duikersifonhevel"] + ) + for variable in WSRL.keys(): + WSRL[variable] = pd.concat([WSRL[variable], temp_WSRL[variable]]).reset_index(drop=True) + + +# Additional data is given in another gpkg, which includes the peilgebieden + + +gdb_path = r"..\..\Data_preprocessed\Waterschappen\WSRL\OverigeGegevens.gdb" +WSRL_gdb = read_gpkg_layers(gpkg_path=gdb_path, variables=["PeilgebiedenPraktijk"]) + +# add the gdb to the dict +# WSRL['peilgebiedafwijking'] = WSRL_gdb['Peilafwijkingen'] +WSRL["peilgebiedpraktijk"] = WSRL_gdb["PeilgebiedenPraktijk"] + + +# change names +WSRL["stuw"] = WSRL.pop("Stuw") +WSRL["gemaal"] = WSRL.pop("Gemaal") +WSRL["hydroobject"] = WSRL.pop("Hydroobject") +WSRL["duikersifonhevel"] = WSRL.pop("Duikersifonhevel") + + +# ### Adjust column names + + +# add gemaal information +WSRL["peilgebiedpraktijk"]["waterhoogte"] = np.nan +WSRL["peilgebiedpraktijk"].loc[ + WSRL["peilgebiedpraktijk"]["TYPE_PEILBEHEER"] == "seizoensgebonden", "waterhoogte" +].fillna(WSRL["peilgebiedpraktijk"]["ZOMERPEIL"], inplace=True) +WSRL["peilgebiedpraktijk"].loc[WSRL["peilgebiedpraktijk"]["TYPE_PEILBEHEER"] == "flexibel", "waterhoogte"].fillna( + WSRL["peilgebiedpraktijk"]["ZOMERPEIL"], inplace=True +) +WSRL["peilgebiedpraktijk"].loc[WSRL["peilgebiedpraktijk"]["TYPE_PEILBEHEER"] == "natuurlijk", "waterhoogte"].fillna( + WSRL["peilgebiedpraktijk"]["ZOMERPEIL"], inplace=True +) +WSRL["peilgebiedpraktijk"].loc[WSRL["peilgebiedpraktijk"]["TYPE_PEILBEHEER"] == "vast", "waterhoogte"].fillna( + 
WSRL["peilgebiedpraktijk"]["VASTPEIL"], inplace=True +) +WSRL["peilgebiedpraktijk"].loc[WSRL["peilgebiedpraktijk"]["TYPE_PEILBEHEER"] == "streef", "waterhoogte"].fillna( + WSRL["peilgebiedpraktijk"]["STREEFPEIL"], inplace=True +) + +# niet elke kolom is ingevuld met de paren die je zou verwachten. Vul voor de ontbrekende waarden de volgende waarden in: +WSRL["peilgebiedpraktijk"].loc[:, "waterhoogte"].fillna(WSRL["peilgebiedpraktijk"]["STREEFPEIL"], inplace=True) +WSRL["peilgebiedpraktijk"].loc[:, "waterhoogte"].fillna(WSRL["peilgebiedpraktijk"]["VASTPEIL"], inplace=True) +WSRL["peilgebiedpraktijk"].loc[:, "waterhoogte"].fillna(WSRL["peilgebiedpraktijk"]["ZOMERPEIL"], inplace=True) +WSRL["peilgebiedpraktijk"].loc[:, "waterhoogte"].fillna(WSRL["peilgebiedpraktijk"]["MIN_PEIL"], inplace=True) +WSRL["peilgebiedpraktijk"].loc[:, "waterhoogte"].fillna(WSRL["peilgebiedpraktijk"]["WINTERPEIL"], inplace=True) +WSRL["peilgebiedpraktijk"].loc[:, "waterhoogte"].fillna(WSRL["peilgebiedpraktijk"]["MAX_PEIL"], inplace=True) + + +# Add gemaal information +# determine aanvoer en afvoer gemalen +WSRL["gemaal"]["func_aanvoer"], WSRL["gemaal"]["func_afvoer"], WSRL["gemaal"]["func_circulatie"] = ( + False, + False, + False, +) # default is False +WSRL["gemaal"]["functiegemaal"] = WSRL["gemaal"]["naam"].astype(str) + + +WSRL["gemaal"].loc[WSRL["gemaal"].functiegemaal.str.contains("Afvoer|Onbekend|Af-|Onderbemaling"), "func_afvoer"] = True +WSRL["gemaal"].loc[WSRL["gemaal"].functiegemaal.str.contains("Aanvoer|Opmaling"), "func_aanvoer"] = True +WSRL["gemaal"].loc[WSRL["gemaal"].functiegemaal.str.contains("Doorspoelgemaal"), "func_circulatie"] = True +WSRL["gemaal"].loc[ + (WSRL["gemaal"].func_afvoer is False) + & (WSRL["gemaal"].func_aanvoer is False) + & (WSRL["gemaal"].func_circulatie is False), + "func_afvoer", +] = True # set to afvoergemaal is there the function is unknown + + +# discard irrelevant data of WSRL, and create a uniform dataset compared to the other waterschappen 
+WSRL["stuw"] = WSRL["stuw"][["code", "globalid", "nen3610id", "geometry"]] +WSRL["gemaal"] = WSRL["gemaal"][ + ["code", "globalid", "nen3610id", "func_afvoer", "func_aanvoer", "func_circulatie", "geometry"] +] +WSRL["hydroobject"] = WSRL["hydroobject"][["code", "globalid", "nen3610id", "geometry"]] +WSRL["duikersifonhevel"] = WSRL["duikersifonhevel"][["code", "globalid", "nen3610id", "geometry"]] + + +# gpd.overlay(WSRL['peilgebiedpraktijk'], WSRL['peilgebiedafwijking'], how='union', keep_geom_type=False).to_file('WSRL_kan_weg.gpkg') + + +fig, ax = plt.subplots() +WSRL["peilgebiedpraktijk"].plot(ax=ax, color="cornflowerblue", label="Peilgebiedpraktijk") +# WSRL['peilgebiedafwijking'].plot(ax=ax, color='blue', label = 'Peilgebiedafwijking') +ax.legend() +plt.show() + + +def intersect_using_spatial_index(peilgebied_praktijk, peilgebied_afwijking, check): + """ + Conduct spatial intersection using spatial index for candidates GeoDataFrame to make queries faster. + + Note, with this function, you can have multiple Polygons in the 'intersecting_gdf' and it will return all the points + intersect with ANY of those geometries. 
+ """ + peilgebied_praktijk_sindex = peilgebied_praktijk.sindex + possible_matches_index = [] + + # 'itertuples()' function is a faster version of 'iterrows()' + for other in peilgebied_afwijking.itertuples(): + bounds = other.geometry.bounds + c = list(peilgebied_praktijk_sindex.intersection(bounds)) + possible_matches_index += c + + # Get unique candidates + unique_candidate_matches = list(set(possible_matches_index)) + possible_matches = peilgebied_praktijk.iloc[unique_candidate_matches] + + # Conduct the actual intersect + overlapping_pg_praktijk = possible_matches.loc[ + possible_matches.intersects(peilgebied_afwijking.unary_union) + ] # the entire peilgebied praktijk polygons + + # remove the peilgebied afwijking from the peilgebied praktijk + intersection = gpd.overlay(overlapping_pg_praktijk, peilgebied_afwijking, how="intersection") + overlapping_updated = gpd.overlay( + peilgebied_praktijk, intersection, how="symmetric_difference" + ) # check of het difference of symmetrical difference moet zijn!#remove the difference between pg_praktijk and pg_afwijking + peilgebied = overlapping_updated.append( + intersection, ignore_index=True + ) # add the removed difference, but now only the intersected part of pg_afwijking + + if check: + peilgebied_praktijk.to_file("Checks/Rivierenland/peilgebied_praktijk.gpkg", driver="GPKG") + peilgebied_afwijking.to_file("Checks/Rivierenland/peilgebied_afwijking.gpkg", driver="GPKG") + + intersection.to_file("Checks/Rivierenland/intersection.gpkg", driver="GPKG") + overlapping_updated.to_file("Checks/Rivierenland/overlapping_updated.gpkg", driver="GPKG") + peilgebied.to_file("Checks/Rivierenland/peilgebied.gpkg", driver="GPKG") + + return peilgebied + + +# peilgebied = intersect_using_spatial_index(peilgebied_praktijk = WSRL['peilgebiedpraktijk'], +# peilgebied_afwijking = WSRL['peilgebiedafwijking'], +# check = False) +peilgebied = WSRL["peilgebiedpraktijk"] + + +# WSRL['peilgebiedpraktijk'] = 
WSRL['peilgebiedpraktijk'][['ZOMERPEIL', 'WINTERPEIL', 'MIN_PEIL', 'MAX_PEIL', 'STREEFPEIL', 'VASTPEIL', 'TYPE_PEILBEHEER', 'geometry']] +# peilgebied['waterhoogte'] = np.nan + +# for fill_column in ['ZOMERPEIL', 'STREEFPEIL', 'VASTPEIL', 'MAX_PEIL', 'MIN_PEIL', 'WINTERPEIL']: +# peilgebied.waterhoogte.fillna(value = peilgebied[fill_column], inplace=True) + +# WSRL['peilgebiedpraktijk'].waterhoogte.fillna(value=WSRL['peilgebiedpraktijk'][['ZOMERPEIL', 'STREEFPEIL', 'VASTPEIL', 'MAX_PEIL', 'MIN_PEIL']]) + + +# store the ontbrekende_streefpeilen to a gpkg, check validity +ontbrekende_streefpeilen = peilgebied[peilgebied.waterhoogte.isna()] +# ontbrekende_streefpeilen.to_file('Checks/Rivierenland/peilgebieden_zonder_streefpeil.gpkg', driver='GPKG') +peilgebied.plot() + + +peilgebied["CODE_0"] = peilgebied.CODE # change names for later purposes +peilgebied.CODE = np.nan + +# add and fill the final columns +columns_to_check = ["CODE_0"] + +for fill_column in ["CODE_0"]: + peilgebied.CODE.fillna(value=peilgebied[fill_column], inplace=True) + +peilgebied["globalid"] = peilgebied.index.astype(str) +peilgebied["nen3610id"] = "dummy_nen3610id_peilgebied_" + peilgebied.index.astype(str) + +# create streefpeil key +WSRL["streefpeil"] = peilgebied[["waterhoogte", "globalid"]] +WSRL["streefpeil"]["geometry"] = np.nan +WSRL["streefpeil"] = gpd.GeoDataFrame(WSRL["streefpeil"]) + +# only select the relevant columns +peilgebied = peilgebied[["CODE", "globalid", "nen3610id", "geometry"]] +WSRL["peilgebied"] = peilgebied # add to the dict + + +# discard irrelvant data +variables = ["peilgebiedpraktijk", "peilgebiedafwijking"] + +for variable in variables: + if str(variable) in WSRL: + del WSRL[variable] + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=WSRL) + + +# ### Store data + + +# Check if the directory exists +if not os.path.exists(output_gpkg_path): + # If it doesn't exist, create it + os.makedirs(output_gpkg_path) + 
+store_data(waterschap=WSRL, output_gpkg_path=output_gpkg_path + "/WSRL") diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.ipynb deleted file mode 100644 index de0aa81..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.ipynb +++ /dev/null @@ -1,346 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import os\n", - "\n", - "import geopandas as gpd\n", - "import numpy as np\n", - "import pandas as pd" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "from general_functions import *" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "pd.set_option(\"display.max_columns\", None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Scheldestromen\"\n", - "path_Scheldestromen = \"..\\..\\Data_preprocessed\\Waterschappen\\Scheldestromen\\Scheldestromen.gpkg\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/Scheldestromen\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "Scheldestromen = read_gpkg_layers(\n", - " gpkg_path=path_Scheldestromen,\n", - " variables=[\n", - " \"stuw\",\n", - " \"gemaal\",\n", - " # 'afsluitmiddel',\n", - " \"duikersifonhevel\",\n", - " \"hydroobject\",\n", - " # 'peilgebiedvigerend',\n", - " # 'peilgebiedpraktijk',\n", - " # 'peilafwijkinggebied',\n", - " # 'streefpeil',\n", - " ],\n", - " engine=\"pyogrio\",\n", - ")\n", - "\n", - "# the peilgebieden and streefpeilen do not contain overlapping 
values. Scheldestromen has delivered additional files as shapes\n", - "Scheldestromen[\"peilgebied\"] = gpd.read_file(\n", - " \"..\\..\\Data_preprocessed\\Waterschappen\\Scheldestromen\\Praktijkpeilgebieden_20231204.shp\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "Scheldestromen[\"peilgebied\"][\"code\"] = Scheldestromen[\"peilgebied\"][\"GPGIDENT\"]\n", - "Scheldestromen[\"peilgebied\"][\"globalid\"] = Scheldestromen[\"peilgebied\"][\"GLOBALID\"]\n", - "Scheldestromen[\"peilgebied\"][\"waterhoogte\"] = Scheldestromen[\"peilgebied\"][\"GPGZP\"]\n", - "Scheldestromen[\"peilgebied\"][\"nen3610id\"] = \"dummy_nen3610id_peilgebied_\" + Scheldestromen[\"peilgebied\"].index.astype(\n", - " str\n", - ")\n", - "\n", - "Scheldestromen[\"streefpeil\"] = gpd.GeoDataFrame()\n", - "Scheldestromen[\"streefpeil\"][\"waterhoogte\"] = Scheldestromen[\"peilgebied\"][\"waterhoogte\"]\n", - "Scheldestromen[\"streefpeil\"][\"globalid\"] = Scheldestromen[\"peilgebied\"][\"globalid\"]\n", - "Scheldestromen[\"streefpeil\"][\"geometry\"] = np.nan\n", - "\n", - "\n", - "Scheldestromen[\"peilgebied\"] = Scheldestromen[\"peilgebied\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# convert multiz points to points\n", - "Scheldestromen[\"stuw\"].geometry = Scheldestromen[\"stuw\"].centroid\n", - "Scheldestromen[\"gemaal\"].geometry = Scheldestromen[\"gemaal\"].centroid" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "# Scheldestromen" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# stuw\n", - "Scheldestromen[\"stuw\"] = Scheldestromen[\"stuw\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "\n", - "# gemaal\n", - 
"Scheldestromen[\"gemaal\"] = Scheldestromen[\"gemaal\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Scheldestromen[\"gemaal\"][\"code\"] = \"dummy_code_gemaal_\" + Scheldestromen[\"gemaal\"].index.astype(str)\n", - "\n", - "# hydroobject\n", - "Scheldestromen[\"hydroobject\"][\"code\"] = Scheldestromen[\"hydroobject\"][\"naam\"]\n", - "Scheldestromen[\"hydroobject\"] = Scheldestromen[\"hydroobject\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "\n", - "# duikersifonhevel\n", - "Scheldestromen[\"duikersifonhevel\"] = Scheldestromen[\"duikersifonhevel\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# pd.merge(left = Scheldestromen['peilgebiedpraktijk'],\n", - "# right = Scheldestromen['streefpeil'],\n", - "# left_on = 'globalid',\n", - "# right_on = 'peilgebiedpraktijkid')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "Scheldestromen[\"stuw\"] = Scheldestromen[\"stuw\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Scheldestromen[\"gemaal\"] = Scheldestromen[\"gemaal\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Scheldestromen[\"hydroobject\"] = Scheldestromen[\"hydroobject\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "Scheldestromen[\"duikersifonhevel\"] = Scheldestromen[\"duikersifonhevel\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# Scheldestromen['peilgebiedpraktijk']['geometry'] = Scheldestromen['peilgebiedpraktijk'].buffer(distance = 0)\n", - "# Scheldestromen['peilafwijkinggebied']['geometry'] = Scheldestromen['peilafwijkinggebied'].buffer(distance = 0)" - ] - }, - { - "cell_type": "code", - "execution_count": 
null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# peilgebied = pd.merge(left = Scheldestromen['streefpeil'],\n", - "# right = Scheldestromen['peilgebiedpraktijk'],\n", - "# left_on = 'peilgebiedpraktijkid',\n", - "# right_on = 'globalid')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# streefpeil = gpd.GeoDataFrame()\n", - "# streefpeil['waterhoogte'] = peilgebied['waterhoogte']\n", - "# streefpeil['globalid'] = peilgebied['peilgebiedpraktijkid']\n", - "# streefpeil['geometry'] = np.nan\n", - "# Scheldestromen['streefpeil'] = gpd.GeoDataFrame(streefpeil)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# Scheldestromen['peilgebied'] = gpd.GeoDataFrame()\n", - "# Scheldestromen['peilgebied'][['code', 'nen3610id', 'globalid', 'geometry']] = peilgebied[['code', 'nen3610id_y', 'globalid_y', 'geometry_y']]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# delete irrelvant data\n", - "variables = [\"peilgebiedpraktijk\", \"peilgebiedvigerend\", \"peilafwijkinggebied\"]\n", - "\n", - "for variable in variables:\n", - " if str(variable) in Scheldestromen:\n", - " del Scheldestromen[variable]" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=Scheldestromen)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "for i in range(len(Scheldestromen[\"peilgebied\"])):\n", - " if Scheldestromen[\"peilgebied\"][\"geometry\"].at[i].geom_type == \"Polygon\":\n", - " 
Scheldestromen[\"peilgebied\"].loc[i, \"geometry\"].plot()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "merged = pd.merge(left=Scheldestromen[\"peilgebied\"], right=Scheldestromen[\"streefpeil\"], on=\"globalid\")\n", - "\n", - "merged[merged.waterhoogte.isna()]" - ] - }, - { - "cell_type": "markdown", - "id": "20", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "# Check if the directory exists\n", - "if not os.path.exists(output_gpkg_path):\n", - " # If it doesn't exist, create it\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=Scheldestromen, output_gpkg_path=output_gpkg_path + \"/Scheldestromen\")" - ] - }, - { - "cell_type": "raw", - "id": "22", - "metadata": {}, - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "Scheldestromen[\"hydroobject\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - "name": "routing" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py new file mode 100644 index 0000000..6ccca61 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py @@ -0,0 +1,147 @@ +# import packages and 
functions +import os + +import geopandas as gpd +import numpy as np +import pandas as pd +from general_functions import * + +pd.set_option("display.max_columns", None) + + +# define relative paths +waterschap = "Scheldestromen" +path_Scheldestromen = "..\..\Data_preprocessed\Waterschappen\Scheldestromen\Scheldestromen.gpkg" +output_gpkg_path = "../../Data_postprocessed/Waterschappen/Scheldestromen" + + +Scheldestromen = read_gpkg_layers( + gpkg_path=path_Scheldestromen, + variables=[ + "stuw", + "gemaal", + # 'afsluitmiddel', + "duikersifonhevel", + "hydroobject", + # 'peilgebiedvigerend', + # 'peilgebiedpraktijk', + # 'peilafwijkinggebied', + # 'streefpeil', + ], + engine="pyogrio", +) + +# the peilgebieden and streefpeilen do not contain overlapping values. Scheldestromen has delivered additional files as shapes +Scheldestromen["peilgebied"] = gpd.read_file( + "..\..\Data_preprocessed\Waterschappen\Scheldestromen\Praktijkpeilgebieden_20231204.shp" +) + + +Scheldestromen["peilgebied"]["code"] = Scheldestromen["peilgebied"]["GPGIDENT"] +Scheldestromen["peilgebied"]["globalid"] = Scheldestromen["peilgebied"]["GLOBALID"] +Scheldestromen["peilgebied"]["waterhoogte"] = Scheldestromen["peilgebied"]["GPGZP"] +Scheldestromen["peilgebied"]["nen3610id"] = "dummy_nen3610id_peilgebied_" + Scheldestromen["peilgebied"].index.astype( + str +) + +Scheldestromen["streefpeil"] = gpd.GeoDataFrame() +Scheldestromen["streefpeil"]["waterhoogte"] = Scheldestromen["peilgebied"]["waterhoogte"] +Scheldestromen["streefpeil"]["globalid"] = Scheldestromen["peilgebied"]["globalid"] +Scheldestromen["streefpeil"]["geometry"] = np.nan + + +Scheldestromen["peilgebied"] = Scheldestromen["peilgebied"][["code", "nen3610id", "globalid", "geometry"]] + + +# convert multiz points to points +Scheldestromen["stuw"].geometry = Scheldestromen["stuw"].centroid +Scheldestromen["gemaal"].geometry = Scheldestromen["gemaal"].centroid + + +# # Scheldestromen + + +# stuw +Scheldestromen["stuw"] = 
Scheldestromen["stuw"][["code", "nen3610id", "globalid", "geometry"]] + +# gemaal +Scheldestromen["gemaal"] = Scheldestromen["gemaal"][["code", "nen3610id", "globalid", "geometry"]] +Scheldestromen["gemaal"]["code"] = "dummy_code_gemaal_" + Scheldestromen["gemaal"].index.astype(str) + +# hydroobject +Scheldestromen["hydroobject"]["code"] = Scheldestromen["hydroobject"]["naam"] +Scheldestromen["hydroobject"] = Scheldestromen["hydroobject"][["code", "nen3610id", "globalid", "geometry"]] + +# duikersifonhevel +Scheldestromen["duikersifonhevel"] = Scheldestromen["duikersifonhevel"][["code", "nen3610id", "globalid", "geometry"]] + + +# pd.merge(left = Scheldestromen['peilgebiedpraktijk'], +# right = Scheldestromen['streefpeil'], +# left_on = 'globalid', +# right_on = 'peilgebiedpraktijkid') + + +Scheldestromen["stuw"] = Scheldestromen["stuw"][["code", "nen3610id", "globalid", "geometry"]] +Scheldestromen["gemaal"] = Scheldestromen["gemaal"][["code", "nen3610id", "globalid", "geometry"]] +Scheldestromen["hydroobject"] = Scheldestromen["hydroobject"][["code", "nen3610id", "globalid", "geometry"]] +Scheldestromen["duikersifonhevel"] = Scheldestromen["duikersifonhevel"][["code", "nen3610id", "globalid", "geometry"]] + + +# Scheldestromen['peilgebiedpraktijk']['geometry'] = Scheldestromen['peilgebiedpraktijk'].buffer(distance = 0) +# Scheldestromen['peilafwijkinggebied']['geometry'] = Scheldestromen['peilafwijkinggebied'].buffer(distance = 0) + + +# peilgebied = pd.merge(left = Scheldestromen['streefpeil'], +# right = Scheldestromen['peilgebiedpraktijk'], +# left_on = 'peilgebiedpraktijkid', +# right_on = 'globalid') + + +# streefpeil = gpd.GeoDataFrame() +# streefpeil['waterhoogte'] = peilgebied['waterhoogte'] +# streefpeil['globalid'] = peilgebied['peilgebiedpraktijkid'] +# streefpeil['geometry'] = np.nan +# Scheldestromen['streefpeil'] = gpd.GeoDataFrame(streefpeil) + + +# Scheldestromen['peilgebied'] = gpd.GeoDataFrame() +# Scheldestromen['peilgebied'][['code', 
'nen3610id', 'globalid', 'geometry']] = peilgebied[['code', 'nen3610id_y', 'globalid_y', 'geometry_y']] + + +# delete irrelvant data +variables = ["peilgebiedpraktijk", "peilgebiedvigerend", "peilafwijkinggebied"] + +for variable in variables: + if str(variable) in Scheldestromen: + del Scheldestromen[variable] + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=Scheldestromen) + + +for i in range(len(Scheldestromen["peilgebied"])): + if Scheldestromen["peilgebied"]["geometry"].at[i].geom_type == "Polygon": + Scheldestromen["peilgebied"].loc[i, "geometry"].plot() + + +merged = pd.merge(left=Scheldestromen["peilgebied"], right=Scheldestromen["streefpeil"], on="globalid") + +merged[merged.waterhoogte.isna()] + + +# ### Store data + + +# Check if the directory exists +if not os.path.exists(output_gpkg_path): + # If it doesn't exist, create it + os.makedirs(output_gpkg_path) + +store_data(waterschap=Scheldestromen, output_gpkg_path=output_gpkg_path + "/Scheldestromen") + + +Scheldestromen["hydroobject"] diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.ipynb deleted file mode 100644 index f5278bb..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.ipynb +++ /dev/null @@ -1,409 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import geopandas as gpd\n", - "import pandas as pd\n", - "from general_functions import *\n", - "\n", - "pd.set_option(\"display.max_columns\", None)" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "# Wetterskip Fryslan" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "gpkg_path_Wetterskip = 
\"..\\..\\Data_preprocessed\\Waterschappen\\Wetterskip\\Wetterskip_gecorrigeerde_geometrie.gpkg\"\n", - "output_gpkg_path_Wetterskip = \"../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip\"\n", - "\n", - "peilgebiedpraktijk_path = r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\Wetterskip\\Nieuwe_peilgebieden\\MIPWA_20230907WF.gpkg\"\n", - "streefpeil_path = r\"D:\\Users\\Bruijns\\Documents\\PR4750_20\\Data_preprocessed\\Waterschappen\\Wetterskip\\Nieuwe_peilgebieden\\MIPWA_20230907WF.gpkg\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "# Duikersifonhevel and hydroobject have a type of multicurvedZ, the peilgebieden a MultiSurfaceZ, which geopandas can not handle. I have manually\n", - "# exported these to single shapes, which automatically converts it to regular MultiStrings. Then these layers have been packed together to a\n", - "# geopackage again.\n", - "\n", - "# retrieve the data\n", - "Wetterskip = read_gpkg_layers(\n", - " gpkg_path=gpkg_path_Wetterskip,\n", - " variables=[\"stuw\", \"gemaal\", \"afsluitmiddel\", \"hydroobject\", \"duikersifonhevel\"],\n", - " # 'peilmerk',\n", - " # 'streefpeil',\n", - " # 'peilgebiedpraktijk',\n", - " # 'peilgebiedvigerend',\n", - " # 'peilbesluitgebied'],\n", - " print_var=False,\n", - ")\n", - "\n", - "# The peilgebieden en streefpeilen have been delivered as additional data\n", - "peilgebiedpraktijk = gpd.read_file(peilgebiedpraktijk_path, layer=\"DAMO_W_PeilgebiedPraktijk\")\n", - "\n", - "streefpeil = gpd.read_file(streefpeil_path, layer=\"DAMO_W_Streefpeil\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# merge the streefpeil and the peilgebieden\n", - "peilgebied = pd.merge(left=streefpeil, right=peilgebiedpraktijk, left_on=\"PEILGEBIEDPRAKTIJKID\", right_on=\"GLOBALID\")\n", - "\n", - "peilgebied = 
gpd.GeoDataFrame(peilgebied, geometry=\"geometry_y\", crs=\"EPSG:28992\")\n", - "peilgebied.reset_index(drop=True, inplace=True)\n", - "peilgebied.plot()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "peilgebied = peilgebied.explode(ignore_index=True, index_parts=False)\n", - "# peilgebied" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "Wetterskip[\"peilgebied\"] = peilgebied[[\"WATERHOOGTE\", \"nen3610id_y\", \"GLOBALID_y\", \"geometry_y\"]]\n", - "Wetterskip[\"peilgebied\"] = Wetterskip[\"peilgebied\"].rename(\n", - " columns={\n", - " \"WATERHOOGTE\": \"waterhoogte\",\n", - " \"nen3610id_y\": \"nen3610id\",\n", - " \"GLOBALID_y\": \"globalid\",\n", - " \"geometry_y\": \"geometry\",\n", - " }\n", - ")\n", - "\n", - "# give new globalids and codes, as the peilgebied.explode() results in non unique values.\n", - "Wetterskip[\"peilgebied\"][\"code\"] = \"dummy_code_peilgebied_\" + Wetterskip[\"peilgebied\"].index.astype(str)\n", - "Wetterskip[\"peilgebied\"][\"globalid\"] = \"dummy_globalid_peilgebied_\" + Wetterskip[\"peilgebied\"].index.astype(str)\n", - "\n", - "# create the streefpeilen layer\n", - "Wetterskip[\"streefpeil\"] = Wetterskip[\"peilgebied\"][[\"waterhoogte\", \"globalid\", \"geometry\"]]\n", - "Wetterskip[\"peilgebied\"] = Wetterskip[\"peilgebied\"][[\"code\", \"nen3610id\", \"globalid\", \"geometry\"]]\n", - "\n", - "Wetterskip[\"streefpeil\"][\"geometry\"] = None\n", - "\n", - "Wetterskip[\"streefpeil\"] = gpd.GeoDataFrame(Wetterskip[\"streefpeil\"], geometry=\"geometry\", crs=\"EPSG:28992\")\n", - "Wetterskip[\"peilgebied\"] = gpd.GeoDataFrame(Wetterskip[\"peilgebied\"], geometry=\"geometry\", crs=\"EPSG:28992\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "Wetterskip[\"stuw\"][\"geometry\"] = 
Wetterskip[\"stuw\"].centroid # prevent strange geometries\n", - "Wetterskip[\"gemaal\"][\"geometry\"] = Wetterskip[\"gemaal\"].centroid # prevent strange geometries\n", - "\n", - "Wetterskip[\"hydroobject\"] = Wetterskip[\"hydroobject\"].loc[\n", - " Wetterskip[\"hydroobject\"].categorieo == \"primair\"\n", - "] # feedback WS Fryslan: only use primaire hydroobjecten" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# determine aanvoer en afvoer gemalen\n", - "Wetterskip[\"gemaal\"][\"func_aanvoer\"], Wetterskip[\"gemaal\"][\"func_afvoer\"], Wetterskip[\"gemaal\"][\"func_circulatie\"] = (\n", - " False,\n", - " False,\n", - " False,\n", - ") # default is False\n", - "Wetterskip[\"gemaal\"][\"functiegemaal\"] = Wetterskip[\"gemaal\"][\"functiegemaal\"].astype(str)\n", - "\n", - "Wetterskip[\"gemaal\"].loc[\n", - " Wetterskip[\"gemaal\"].functiegemaal.str.contains(\"Onbekend|Onderbemaling|Afvoergemaal\"), \"func_afvoer\"\n", - "] = True\n", - "Wetterskip[\"gemaal\"].loc[Wetterskip[\"gemaal\"].functiegemaal.str.contains(\"Opmaling|Aanvoer\"), \"func_aanvoer\"] = True\n", - "Wetterskip[\"gemaal\"].loc[Wetterskip[\"gemaal\"].functiegemaal.str.contains(\"Overig|circulatie\"), \"func_circulatie\"] = True\n", - "Wetterskip[\"gemaal\"].loc[\n", - " (Wetterskip[\"gemaal\"].func_afvoer is False)\n", - " & (Wetterskip[\"gemaal\"].func_aanvoer is False)\n", - " & (Wetterskip[\"gemaal\"].func_circulatie is False),\n", - " \"func_afvoer\",\n", - "] = True # set to afvoergemaal is there the function is unknown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# points\n", - "Wetterskip[\"stuw\"] = Wetterskip[\"stuw\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "Wetterskip[\"gemaal\"] = Wetterskip[\"gemaal\"][\n", - " [\"code\", \"globalid\", \"nen3610id\", \"func_afvoer\", \"func_aanvoer\", 
\"func_circulatie\", \"geometry\"]\n", - "]\n", - "Wetterskip[\"afsluitmiddel\"] = Wetterskip[\"afsluitmiddel\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "\n", - "# lines\n", - "Wetterskip[\"hydroobject\"] = Wetterskip[\"hydroobject\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]\n", - "Wetterskip[\"duikersifonhevel\"] = Wetterskip[\"duikersifonhevel\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "# #delete irrelvant data\n", - "# variables = ['peilmerk',\n", - "# 'peilgebiedpraktijk',\n", - "# 'peilgebiedvigerend',\n", - "# 'peilbesluitgebied',\n", - "# 'peilgebiedpraktijk']\n", - "\n", - "# for variable in variables:\n", - "# if str(variable) in Wetterskip:\n", - "# del Wetterskip[variable]\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# add duikersifonhevels to the hydroobjecten\n", - "Wetterskip[\"hydroobject\"] = pd.concat([Wetterskip[\"hydroobject\"], Wetterskip[\"duikersifonhevel\"]])\n", - "Wetterskip[\"hydroobject\"] = Wetterskip[\"hydroobject\"].drop_duplicates(\n", - " subset=\"globalid\"\n", - ") # in case it is run multiple times\n", - "Wetterskip[\"hydroobject\"] = gpd.GeoDataFrame(Wetterskip[\"hydroobject\"]).set_crs(\"epsg:28992\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=Wetterskip)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "store_data(waterschap=Wetterskip, output_gpkg_path=output_gpkg_path_Wetterskip)" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": { - "jp-MarkdownHeadingCollapsed": true, - "tags": [] - }, - "source": [ - "# Oud" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "id": "15", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# layout_path = r\"..\\..\\Data_postprocessed\\QGIS_overzicht\\routing_style_format3.gpkg\"\n", - "# output_layout_path = r\"..\\..\\Data_postprocessed\\QGIS_overzicht\\routing_style_format2_saved\"\n", - "# layout = read_gpkg_layers(gpkg_path = layout_path,\n", - "# variables = ['stuw',\n", - "# 'gemaal',\n", - "# 'afsluitmiddel'])#,\n", - "# # 'hydroobject',\n", - "# # 'duikersifonhevel',\n", - "# # 'streefpeil',\n", - "# # 'peilgebiedpraktijk',\n", - "# # 'peilgebiedvigerend'])\n", - "# store_data(waterschap = layout,\n", - "# output_gpkg_path = output_layout_path)\n" - ] - }, - { - "cell_type": "raw", - "id": "16", - "metadata": {}, - "source": [ - "There are some peilgebieden without peil. Merge the peilgebied praktijk and the peilgebiedvigerend. Then, take the difference between this merged peilgebied and the peilbesluit gebied. The leftover areas should get a streefpeil based on the layer of peilmerk." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "# peilgebieden_met_peil = peilgebieden_met_peil.rename(columns = {'code_left':'code',\n", - "# 'globalid_left':'globalid',\n", - "# 'nen3610id_left':'nen3610id',\n", - "# 'geometry_left':'geometry',\n", - "# 'hoogte':'waterhoogte'})\n", - "# peilgebieden_met_peil = peilgebieden_met_peil[['waterhoogte','code', 'globalid', 'nen3610id', 'geometry']].reset_index(drop=True)\n", - "# peilgebieden_met_peil = peilgebieden_met_peil.drop_duplicates(subset='globalid')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# #bring the peilgebied in the correct format\n", - "# extra_peilgebied = peilgebieden_met_peil[['waterhoogte','code','globalid','nen3610id','geometry']].reset_index(drop=True)\n", - "\n", - "# #bring the streefpeil in the correct format\n", - "# extra_peil = peilgebieden_met_peil[['waterhoogte', 'globalid']]\n", - "# extra_peil = extra_peil.rename(columns = {'globalid':'peilgebiedpraktijkid'})\n", - "# extra_peil['peilgebiedvigerendid'] = None\n", - "# extra_peil['geometry'] = None\n", - "\n", - "# #add semi dummy globalid's and nen3610id's\n", - "# extra_peil['globalid'], extra_peil['nen3610id'] = np.arange(0, len(extra_peil)), np.arange(0, len(extra_peil))\n", - "# extra_peil['globalid'] = 'globalid_wetterskip_streefpeil_' + extra_peil['globalid'].astype(str)\n", - "# extra_peil['nen3610id'] = 'nen3610id_wetterskip_' + extra_peil['nen3610id'].astype(str)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "# #add the (geo)dataframes together\n", - "# Wetterskip['peilgebied'] = gpd.GeoDataFrame(pd.concat([peilgebied_PV, extra_peilgebied])).reset_index(drop=True)\n", - "# Wetterskip['streefpeil'] = gpd.GeoDataFrame(pd.concat([Wetterskip['streefpeil'], 
extra_peil])).reset_index(drop=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# pd.merge(left=Wetterskip['streefpeil'],\n", - "# right=peilgebied_PV,\n", - "# left_on='peilgebiedid',\n", - "# right_on='globalid')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "# Wetterskip['streefpeil']['peilgebiedid'] = None\n", - "# Wetterskip['streefpeil']['peilgebiedid'].fillna(Wetterskip['streefpeil']['peilgebiedvigerendid'], inplace = True)\n", - "# Wetterskip['streefpeil']['peilgebiedid'].fillna(Wetterskip['streefpeil']['peilgebiedpraktijkid'], inplace = True)\n", - "\n", - "# #move the peilgebiedid id to both the peilgebiedenas well as the streefpeilen\n", - "# Wetterskip['peilgebied'] = gpd.GeoDataFrame()\n", - "# Wetterskip['peilgebied']['peilgebiedid'] = Wetterskip['streefpeil']['peilgebiedid']\n", - "\n", - "# Wetterskip['peilgebied'][['code','globalid','nen3610id']] = Wetterskip['streefpeil'][['code','globalid','nen3610id',]]\n", - "\n", - "\n", - "# #the peilgebieden have been merged. 
Drop the irrelevant columns\n", - "# Wetterskip['streefpeil'] = Wetterskip['streefpeil'][['waterhoogte', 'peilgebiedid']]#.drop(columns=['peilgebiedvigerendid', 'peilgebiedpraktijkid'], inplace = True)\n", - "# # Wetterskip['peilgebied'] =" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - "name": "routing" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py new file mode 100644 index 0000000..ab07e73 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py @@ -0,0 +1,212 @@ +# import packages and functions +import geopandas as gpd +import pandas as pd +from general_functions import * + +pd.set_option("display.max_columns", None) + + +# # Wetterskip Fryslan + + +gpkg_path_Wetterskip = "../../Data_preprocessed/Waterschappen/Wetterskip/Wetterskip_gecorrigeerde_geometrie.gpkg" +output_gpkg_path_Wetterskip = "../../Data_postprocessed/Waterschappen/Wetterskip/Wetterskip" + +peilgebiedpraktijk_path = r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\Wetterskip\Nieuwe_peilgebieden\MIPWA_20230907WF.gpkg" +streefpeil_path = r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\Wetterskip\Nieuwe_peilgebieden\MIPWA_20230907WF.gpkg" + + +# Duikersifonhevel and hydroobject have a type of multicurvedZ, the 
peilgebieden a MultiSurfaceZ, which geopandas can not handle. I have manually +# exported these to single shapes, which automatically converts it to regular MultiStrings. Then these layers have been packed together to a +# geopackage again. + +# retrieve the data +Wetterskip = read_gpkg_layers( + gpkg_path=gpkg_path_Wetterskip, + variables=["stuw", "gemaal", "afsluitmiddel", "hydroobject", "duikersifonhevel"], + # 'peilmerk', + # 'streefpeil', + # 'peilgebiedpraktijk', + # 'peilgebiedvigerend', + # 'peilbesluitgebied'], + print_var=False, +) + +# The peilgebieden en streefpeilen have been delivered as additional data +peilgebiedpraktijk = gpd.read_file(peilgebiedpraktijk_path, layer="DAMO_W_PeilgebiedPraktijk") + +streefpeil = gpd.read_file(streefpeil_path, layer="DAMO_W_Streefpeil") + + +# merge the streefpeil and the peilgebieden +peilgebied = pd.merge(left=streefpeil, right=peilgebiedpraktijk, left_on="PEILGEBIEDPRAKTIJKID", right_on="GLOBALID") + +peilgebied = gpd.GeoDataFrame(peilgebied, geometry="geometry_y", crs="EPSG:28992") +peilgebied.reset_index(drop=True, inplace=True) +peilgebied.plot() + + +peilgebied = peilgebied.explode(ignore_index=True, index_parts=False) +# peilgebied + + +Wetterskip["peilgebied"] = peilgebied[["WATERHOOGTE", "nen3610id_y", "GLOBALID_y", "geometry_y"]] +Wetterskip["peilgebied"] = Wetterskip["peilgebied"].rename( + columns={ + "WATERHOOGTE": "waterhoogte", + "nen3610id_y": "nen3610id", + "GLOBALID_y": "globalid", + "geometry_y": "geometry", + } +) + +# give new globalids and codes, as the peilgebied.explode() results in non unique values. 
+Wetterskip["peilgebied"]["code"] = "dummy_code_peilgebied_" + Wetterskip["peilgebied"].index.astype(str) +Wetterskip["peilgebied"]["globalid"] = "dummy_globalid_peilgebied_" + Wetterskip["peilgebied"].index.astype(str) + +# create the streefpeilen layer +Wetterskip["streefpeil"] = Wetterskip["peilgebied"][["waterhoogte", "globalid", "geometry"]] +Wetterskip["peilgebied"] = Wetterskip["peilgebied"][["code", "nen3610id", "globalid", "geometry"]] + +Wetterskip["streefpeil"]["geometry"] = None + +Wetterskip["streefpeil"] = gpd.GeoDataFrame(Wetterskip["streefpeil"], geometry="geometry", crs="EPSG:28992") +Wetterskip["peilgebied"] = gpd.GeoDataFrame(Wetterskip["peilgebied"], geometry="geometry", crs="EPSG:28992") + + +Wetterskip["stuw"]["geometry"] = Wetterskip["stuw"].centroid # prevent strange geometries +Wetterskip["gemaal"]["geometry"] = Wetterskip["gemaal"].centroid # prevent strange geometries + +Wetterskip["hydroobject"] = Wetterskip["hydroobject"].loc[ + Wetterskip["hydroobject"].categorieo == "primair" +] # feedback WS Fryslan: only use primaire hydroobjecten + + +# determine aanvoer en afvoer gemalen +Wetterskip["gemaal"]["func_aanvoer"], Wetterskip["gemaal"]["func_afvoer"], Wetterskip["gemaal"]["func_circulatie"] = ( + False, + False, + False, +) # default is False +Wetterskip["gemaal"]["functiegemaal"] = Wetterskip["gemaal"]["functiegemaal"].astype(str) + +Wetterskip["gemaal"].loc[ + Wetterskip["gemaal"].functiegemaal.str.contains("Onbekend|Onderbemaling|Afvoergemaal"), "func_afvoer" +] = True +Wetterskip["gemaal"].loc[Wetterskip["gemaal"].functiegemaal.str.contains("Opmaling|Aanvoer"), "func_aanvoer"] = True +Wetterskip["gemaal"].loc[Wetterskip["gemaal"].functiegemaal.str.contains("Overig|circulatie"), "func_circulatie"] = True +Wetterskip["gemaal"].loc[ + (Wetterskip["gemaal"].func_afvoer is False) + & (Wetterskip["gemaal"].func_aanvoer is False) + & (Wetterskip["gemaal"].func_circulatie is False), + "func_afvoer", +] = True # set to afvoergemaal is there 
the function is unknown + + +# points +Wetterskip["stuw"] = Wetterskip["stuw"][["code", "globalid", "nen3610id", "geometry"]] +Wetterskip["gemaal"] = Wetterskip["gemaal"][ + ["code", "globalid", "nen3610id", "func_afvoer", "func_aanvoer", "func_circulatie", "geometry"] +] +Wetterskip["afsluitmiddel"] = Wetterskip["afsluitmiddel"][["code", "globalid", "nen3610id", "geometry"]] + +# lines +Wetterskip["hydroobject"] = Wetterskip["hydroobject"][["code", "globalid", "nen3610id", "geometry"]] +Wetterskip["duikersifonhevel"] = Wetterskip["duikersifonhevel"][["code", "globalid", "nen3610id", "geometry"]] + + +# #delete irrelvant data +# variables = ['peilmerk', +# 'peilgebiedpraktijk', +# 'peilgebiedvigerend', +# 'peilbesluitgebied', +# 'peilgebiedpraktijk'] + +# for variable in variables: +# if str(variable) in Wetterskip: +# del Wetterskip[variable] + + +# add duikersifonhevels to the hydroobjecten +Wetterskip["hydroobject"] = pd.concat([Wetterskip["hydroobject"], Wetterskip["duikersifonhevel"]]) +Wetterskip["hydroobject"] = Wetterskip["hydroobject"].drop_duplicates( + subset="globalid" +) # in case it is run multiple times +Wetterskip["hydroobject"] = gpd.GeoDataFrame(Wetterskip["hydroobject"]).set_crs("epsg:28992") + + +show_layers_and_columns(waterschap=Wetterskip) + + +store_data(waterschap=Wetterskip, output_gpkg_path=output_gpkg_path_Wetterskip) + + +# # Oud + + +# layout_path = r"..\..\Data_postprocessed\QGIS_overzicht\routing_style_format3.gpkg" +# output_layout_path = r"..\..\Data_postprocessed\QGIS_overzicht\routing_style_format2_saved" +# layout = read_gpkg_layers(gpkg_path = layout_path, +# variables = ['stuw', +# 'gemaal', +# 'afsluitmiddel'])#, +# # 'hydroobject', +# # 'duikersifonhevel', +# # 'streefpeil', +# # 'peilgebiedpraktijk', +# # 'peilgebiedvigerend']) +# store_data(waterschap = layout, +# output_gpkg_path = output_layout_path) + +# There are some peilgebieden without peil. Merge the peilgebied praktijk and the peilgebiedvigerend. 
Then, take the difference between this merged peilgebied and the peilbesluit gebied. The leftover areas should get a streefpeil based on the layer of peilmerk. + + +# peilgebieden_met_peil = peilgebieden_met_peil.rename(columns = {'code_left':'code', +# 'globalid_left':'globalid', +# 'nen3610id_left':'nen3610id', +# 'geometry_left':'geometry', +# 'hoogte':'waterhoogte'}) +# peilgebieden_met_peil = peilgebieden_met_peil[['waterhoogte','code', 'globalid', 'nen3610id', 'geometry']].reset_index(drop=True) +# peilgebieden_met_peil = peilgebieden_met_peil.drop_duplicates(subset='globalid') + + +# #bring the peilgebied in the correct format +# extra_peilgebied = peilgebieden_met_peil[['waterhoogte','code','globalid','nen3610id','geometry']].reset_index(drop=True) + +# #bring the streefpeil in the correct format +# extra_peil = peilgebieden_met_peil[['waterhoogte', 'globalid']] +# extra_peil = extra_peil.rename(columns = {'globalid':'peilgebiedpraktijkid'}) +# extra_peil['peilgebiedvigerendid'] = None +# extra_peil['geometry'] = None + +# #add semi dummy globalid's and nen3610id's +# extra_peil['globalid'], extra_peil['nen3610id'] = np.arange(0, len(extra_peil)), np.arange(0, len(extra_peil)) +# extra_peil['globalid'] = 'globalid_wetterskip_streefpeil_' + extra_peil['globalid'].astype(str) +# extra_peil['nen3610id'] = 'nen3610id_wetterskip_' + extra_peil['nen3610id'].astype(str) + + +# #add the (geo)dataframes together +# Wetterskip['peilgebied'] = gpd.GeoDataFrame(pd.concat([peilgebied_PV, extra_peilgebied])).reset_index(drop=True) +# Wetterskip['streefpeil'] = gpd.GeoDataFrame(pd.concat([Wetterskip['streefpeil'], extra_peil])).reset_index(drop=True) + + +# pd.merge(left=Wetterskip['streefpeil'], +# right=peilgebied_PV, +# left_on='peilgebiedid', +# right_on='globalid') + + +# Wetterskip['streefpeil']['peilgebiedid'] = None +# Wetterskip['streefpeil']['peilgebiedid'].fillna(Wetterskip['streefpeil']['peilgebiedvigerendid'], inplace = True) +# 
Wetterskip['streefpeil']['peilgebiedid'].fillna(Wetterskip['streefpeil']['peilgebiedpraktijkid'], inplace = True) + +# #move the peilgebiedid id to both the peilgebiedenas well as the streefpeilen +# Wetterskip['peilgebied'] = gpd.GeoDataFrame() +# Wetterskip['peilgebied']['peilgebiedid'] = Wetterskip['streefpeil']['peilgebiedid'] + +# Wetterskip['peilgebied'][['code','globalid','nen3610id']] = Wetterskip['streefpeil'][['code','globalid','nen3610id',]] + + +# #the peilgebieden have been merged. Drop the irrelevant columns +# Wetterskip['streefpeil'] = Wetterskip['streefpeil'][['waterhoogte', 'peilgebiedid']]#.drop(columns=['peilgebiedvigerendid', 'peilgebiedpraktijkid'], inplace = True) +# # Wetterskip['peilgebied'] = diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.ipynb b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.ipynb deleted file mode 100644 index dbb81e7..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.ipynb +++ /dev/null @@ -1,334 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# import packages and functions\n", - "import os\n", - "\n", - "import fiona\n", - "import geopandas as gpd\n", - "import numpy as np\n", - "import pandas as pd\n", - "from general_functions import *" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": {}, - "outputs": [], - "source": [ - "pd.set_option(\"display.max_columns\", None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "# define relative paths\n", - "waterschap = \"Zuiderzeeland\"\n", - "path_zzl = \"..\\..\\Data_preprocessed\\Waterschappen\\Zuiderzeeland\"\n", - "output_gpkg_path = \"../../Data_postprocessed/Waterschappen/Zuiderzeeland\"" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - 
"source": [ - "# Zuiderzeeland" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "Zuiderzeeland = {}\n", - "\n", - "Zuiderzeeland[\"gemaal\"] = gpd.read_file(path_zzl + \"\\gemalen.gpkg\")\n", - "Zuiderzeeland[\"hevels\"] = gpd.read_file(path_zzl + \"\\overigekunstwerken.gpkg\")\n", - "# Zuiderzeeland['peilgebied'] = gpd.read_file(path_zzl + '\\peilgebieden.gpkg')\n", - "Zuiderzeeland[\"peilgebied\"] = gpd.read_file(path_zzl + \"\\peilvakken_nalevering.gpkg\")\n", - "\n", - "\n", - "# use fiona for the duikersifonhevels and watergangen due to unexpted geometry types\n", - "with fiona.open(path_zzl + \"/Duikers.gpkg\", \"r\") as file:\n", - " # Read the contents and store them in the GeoDataFrame\n", - " Zuiderzeeland[\"duikersifonhevel\"] = gpd.GeoDataFrame.from_features(file, crs=\"EPSG:28992\")\n", - "\n", - "with fiona.open(path_zzl + \"/zzl_watergangen_nalevering/zzl_Watergangen.shp\", \"r\") as file:\n", - " # Read the contents and store them in the GeoDataFrame\n", - " Zuiderzeeland[\"hydroobject\"] = gpd.GeoDataFrame.from_features(file)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "Zuiderzeeland[\"hydroobject\"] = Zuiderzeeland[\"hydroobject\"].set_crs(crs=\"WGS84\", allow_override=True)\n", - "Zuiderzeeland[\"hydroobject\"] = Zuiderzeeland[\"hydroobject\"].to_crs(crs=\"EPSG:28992\")" - ] - }, - { - "cell_type": "raw", - "id": "6", - "metadata": {}, - "source": [ - "ZZL: stuwen in KWKSOORT in overigekunstwerken.gpkg" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "KWKSOORT_stuw = [\"Constructie\", \"inlaat\", \"uitlaat\", \"keerwand\"] # gebasseerd op de geleverde data van Zuiderzeeland\n", - "\n", - "Zuiderzeeland[\"stuw\"] = (\n", - " 
Zuiderzeeland[\"hevels\"].loc[Zuiderzeeland[\"hevels\"][\"KWKSOORT\"].isin(KWKSOORT_stuw)].reset_index(drop=True)\n", - ")\n", - "Zuiderzeeland[\"stuw\"].geometry = Zuiderzeeland[\"stuw\"].centroid # prevent pointZ geometries" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "# distinguish multiple parameters from the same gpkg\n", - "Zuiderzeeland[\"afsluitmiddel\"] = (\n", - " Zuiderzeeland[\"hevels\"].loc[Zuiderzeeland[\"hevels\"][\"KWKSOORT\"] == \"Afsluitmiddel (groot)\"].reset_index(drop=True)\n", - ")\n", - "Zuiderzeeland[\"hevels\"] = (\n", - " Zuiderzeeland[\"hevels\"].loc[Zuiderzeeland[\"hevels\"][\"KWKSOORT\"] == \"Hevel\"].reset_index(drop=True)\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "# determine aanvoer en afvoer gemalen\n", - "(\n", - " Zuiderzeeland[\"gemaal\"][\"func_aanvoer\"],\n", - " Zuiderzeeland[\"gemaal\"][\"func_afvoer\"],\n", - " Zuiderzeeland[\"gemaal\"][\"func_circulatie\"],\n", - ") = False, False, False # default is False\n", - "Zuiderzeeland[\"gemaal\"][\"functiegemaal\"] = Zuiderzeeland[\"gemaal\"][\"KGMFUNC\"].astype(str)\n", - "Zuiderzeeland[\"gemaal\"].loc[Zuiderzeeland[\"gemaal\"][\"functiegemaal\"] == \"onbekend\", \"functiegemaal\"] = (\n", - " np.nan\n", - ") # replace onbekend with nan, will be filled up later see one line below\n", - "Zuiderzeeland[\"gemaal\"][\"functiegemaal\"].fillna(\n", - " Zuiderzeeland[\"gemaal\"][\"KGMSOORT\"], inplace=True\n", - ") # some additional is given in this column\n", - "\n", - "Zuiderzeeland[\"gemaal\"].loc[\n", - " Zuiderzeeland[\"gemaal\"].functiegemaal.str.contains(\"af-|afvoer|onderbemaling\"), \"func_afvoer\"\n", - "] = True\n", - "Zuiderzeeland[\"gemaal\"].loc[\n", - " Zuiderzeeland[\"gemaal\"].functiegemaal.str.contains(\"aanvoergemaal|opmaling\"), \"func_aanvoer\"\n", - "] = True\n", - 
"Zuiderzeeland[\"gemaal\"].loc[Zuiderzeeland[\"gemaal\"].functiegemaal.str.contains(\"circulatie\"), \"func_circulatie\"] = True\n", - "Zuiderzeeland[\"gemaal\"].loc[\n", - " (Zuiderzeeland[\"gemaal\"].func_afvoer is False)\n", - " & (Zuiderzeeland[\"gemaal\"].func_aanvoer is False)\n", - " & (Zuiderzeeland[\"gemaal\"].func_circulatie is False),\n", - " \"func_afvoer\",\n", - "] = True # set to afvoergemaal is there the function is unknown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "# Gemaal\n", - "Zuiderzeeland[\"gemaal\"] = Zuiderzeeland[\"gemaal\"][\n", - " [\"KGMIDENT\", \"GLOBALID\", \"func_aanvoer\", \"func_afvoer\", \"func_circulatie\", \"geometry\"]\n", - "]\n", - "Zuiderzeeland[\"gemaal\"] = Zuiderzeeland[\"gemaal\"].rename(columns={\"KGMIDENT\": \"code\", \"GLOBALID\": \"globalid\"})\n", - "Zuiderzeeland[\"gemaal\"][\"nen3610id\"] = \"dummy_nen3610id_gemaal_\" + Zuiderzeeland[\"gemaal\"].index.astype(str)\n", - "\n", - "# Hydroobject\n", - "Zuiderzeeland[\"hydroobject\"] = Zuiderzeeland[\"hydroobject\"][[\"OWAIDENT\", \"GLOBALID\", \"geometry\"]]\n", - "Zuiderzeeland[\"hydroobject\"] = Zuiderzeeland[\"hydroobject\"].rename(columns={\"OWAIDENT\": \"code\", \"GLOBALID\": \"globalid\"})\n", - "Zuiderzeeland[\"hydroobject\"][\"nen3610id\"] = \"dummy_nen3610id_hydroobject_\" + Zuiderzeeland[\"hydroobject\"].index.astype(\n", - " str\n", - ")\n", - "\n", - "# duikersifonhevel\n", - "Zuiderzeeland[\"duikersifonhevel\"] = Zuiderzeeland[\"duikersifonhevel\"][[\"KDUIDENT\", \"GLOBALID\", \"geometry\"]]\n", - "Zuiderzeeland[\"duikersifonhevel\"] = Zuiderzeeland[\"duikersifonhevel\"].rename(\n", - " columns={\"KDUIDENT\": \"code\", \"GLOBALID\": \"globalid\"}\n", - ")\n", - "Zuiderzeeland[\"duikersifonhevel\"][\"nen3610id\"] = 
\"dummy_nen3610id_duikersifonhevel_\" + Zuiderzeeland[\n", - " \"duikersifonhevel\"\n", - "].index.astype(str)\n", - "\n", - "# hevels\n", - "Zuiderzeeland[\"hevels\"] = Zuiderzeeland[\"hevels\"][[\"KWKIDENT\", \"GLOBALID\", \"geometry\"]]\n", - "Zuiderzeeland[\"hevels\"] = Zuiderzeeland[\"hevels\"].rename(columns={\"KWKIDENT\": \"code\", \"GLOBALID\": \"globalid\"})\n", - "Zuiderzeeland[\"hevels\"][\"nen3610id\"] = \"dummy_nen3610id_hevels_\" + Zuiderzeeland[\"hevels\"].index.astype(str)\n", - "# add to the duikersifonhevel\n", - "Zuiderzeeland[\"duikersifonhevel\"] = gpd.GeoDataFrame(\n", - " pd.concat((Zuiderzeeland[\"duikersifonhevel\"], Zuiderzeeland[\"hevels\"]))\n", - ")\n", - "\n", - "# stuw\n", - "Zuiderzeeland[\"stuw\"] = Zuiderzeeland[\"stuw\"][[\"KWKIDENT\", \"GLOBALID\", \"geometry\", \"KWKSOORT\"]]\n", - "Zuiderzeeland[\"stuw\"] = Zuiderzeeland[\"stuw\"].rename(\n", - " columns={\"KWKIDENT\": \"code\", \"GLOBALID\": \"globalid\", \"KWKSOORT\": \"KWKsoort\"}\n", - ")\n", - "Zuiderzeeland[\"stuw\"] = Zuiderzeeland[\"stuw\"].set_crs(\"EPSG:28992\")\n", - "Zuiderzeeland[\"stuw\"][\"nen3610id\"] = \"dummy_nen3610id_stuw_\" + Zuiderzeeland[\"stuw\"].index.astype(str)\n", - "\n", - "# afsluitmiddel\n", - "Zuiderzeeland[\"afsluitmiddel\"] = Zuiderzeeland[\"afsluitmiddel\"][[\"KWKIDENT\", \"GLOBALID\", \"geometry\"]]\n", - "Zuiderzeeland[\"afsluitmiddel\"] = Zuiderzeeland[\"afsluitmiddel\"].rename(\n", - " columns={\"KWKIDENT\": \"code\", \"GLOBALID\": \"globalid\"}\n", - ")\n", - "Zuiderzeeland[\"afsluitmiddel\"][\"nen3610id\"] = \"dummy_nen3610id_hevels_\" + Zuiderzeeland[\"afsluitmiddel\"].index.astype(\n", - " str\n", - ")\n", - "\n", - "# peilgebied\n", - "Zuiderzeeland[\"peilgebied\"] = Zuiderzeeland[\"peilgebied\"][[\"DHYDRO_ZMRPL\", \"GPGIDENT\", \"geometry\"]]\n", - "Zuiderzeeland[\"peilgebied\"][\"nen3610id\"] = \"dummy_nen3610id_peilgebied_\" + Zuiderzeeland[\"peilgebied\"].index.astype(str)\n", - "Zuiderzeeland[\"peilgebied\"][\"globalid\"] = 
\"dummy_globalid_peilgebied_\" + Zuiderzeeland[\"peilgebied\"].index.astype(str)\n", - "Zuiderzeeland[\"peilgebied\"] = Zuiderzeeland[\"peilgebied\"].rename(\n", - " columns={\"DHYDRO_ZMRPL\": \"streefpeil\", \"GPGIDENT\": \"code\"}\n", - ")\n", - "Zuiderzeeland[\"peilgebied\"][\"globalid\"] = \"dummy_globalid_peilgebied_\" + Zuiderzeeland[\"peilgebied\"].index.astype(str)\n", - "\n", - "# streefpeil\n", - "Zuiderzeeland[\"streefpeil\"] = Zuiderzeeland[\"peilgebied\"][[\"streefpeil\", \"globalid\"]]\n", - "Zuiderzeeland[\"streefpeil\"][\"geometry\"] = np.nan\n", - "Zuiderzeeland[\"streefpeil\"].rename(columns={\"streefpeil\": \"waterhoogte\"}, inplace=True)\n", - "Zuiderzeeland[\"streefpeil\"] = gpd.GeoDataFrame(Zuiderzeeland[\"streefpeil\"], geometry=\"geometry\")\n", - "\n", - "# delete the streefpeil in the peilgebied for consistency\n", - "Zuiderzeeland[\"peilgebied\"] = Zuiderzeeland[\"peilgebied\"][[\"code\", \"globalid\", \"nen3610id\", \"geometry\"]]" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "### Check for the correct keys and columns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "show_layers_and_columns(waterschap=Zuiderzeeland)" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": {}, - "source": [ - "### Store data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# Check if the directory exists. 
If it doesn't exist, create it\n", - "\n", - "if not os.path.exists(output_gpkg_path):\n", - " os.makedirs(output_gpkg_path)\n", - "\n", - "store_data(waterschap=Zuiderzeeland, output_gpkg_path=output_gpkg_path + \"/Zuiderzeeland\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Routing", - "language": "python", - "name": "routing" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py new file mode 100644 index 0000000..a41fc62 --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py @@ -0,0 +1,174 @@ +# import packages and functions +import os + +import fiona +import geopandas as gpd +import numpy as np +import pandas as pd +from general_functions import * + +pd.set_option("display.max_columns", None) + + +# define relative paths +waterschap = "Zuiderzeeland" +path_zzl = "..\..\Data_preprocessed\Waterschappen\Zuiderzeeland" +output_gpkg_path = "../../Data_postprocessed/Waterschappen/Zuiderzeeland" + + +# # Zuiderzeeland + + +Zuiderzeeland = {} + +Zuiderzeeland["gemaal"] = gpd.read_file(path_zzl + "\gemalen.gpkg") +Zuiderzeeland["hevels"] = gpd.read_file(path_zzl + "\overigekunstwerken.gpkg") +# Zuiderzeeland['peilgebied'] = 
gpd.read_file(path_zzl + '\peilgebieden.gpkg') +Zuiderzeeland["peilgebied"] = gpd.read_file(path_zzl + "\peilvakken_nalevering.gpkg") + + +# use fiona for the duikersifonhevels and watergangen due to unexpted geometry types +with fiona.open(path_zzl + "/Duikers.gpkg", "r") as file: + # Read the contents and store them in the GeoDataFrame + Zuiderzeeland["duikersifonhevel"] = gpd.GeoDataFrame.from_features(file, crs="EPSG:28992") + +with fiona.open(path_zzl + "/zzl_watergangen_nalevering/zzl_Watergangen.shp", "r") as file: + # Read the contents and store them in the GeoDataFrame + Zuiderzeeland["hydroobject"] = gpd.GeoDataFrame.from_features(file) + + +Zuiderzeeland["hydroobject"] = Zuiderzeeland["hydroobject"].set_crs(crs="WGS84", allow_override=True) +Zuiderzeeland["hydroobject"] = Zuiderzeeland["hydroobject"].to_crs(crs="EPSG:28992") + +ZZL: stuwen in KWKSOORT in overigekunstwerken.gpkg + + +KWKSOORT_stuw = ["Constructie", "inlaat", "uitlaat", "keerwand"] # gebasseerd op de geleverde data van Zuiderzeeland + +Zuiderzeeland["stuw"] = ( + Zuiderzeeland["hevels"].loc[Zuiderzeeland["hevels"]["KWKSOORT"].isin(KWKSOORT_stuw)].reset_index(drop=True) +) +Zuiderzeeland["stuw"].geometry = Zuiderzeeland["stuw"].centroid # prevent pointZ geometries + + +# distinguish multiple parameters from the same gpkg +Zuiderzeeland["afsluitmiddel"] = ( + Zuiderzeeland["hevels"].loc[Zuiderzeeland["hevels"]["KWKSOORT"] == "Afsluitmiddel (groot)"].reset_index(drop=True) +) +Zuiderzeeland["hevels"] = ( + Zuiderzeeland["hevels"].loc[Zuiderzeeland["hevels"]["KWKSOORT"] == "Hevel"].reset_index(drop=True) +) + + +# determine aanvoer en afvoer gemalen +( + Zuiderzeeland["gemaal"]["func_aanvoer"], + Zuiderzeeland["gemaal"]["func_afvoer"], + Zuiderzeeland["gemaal"]["func_circulatie"], +) = False, False, False # default is False +Zuiderzeeland["gemaal"]["functiegemaal"] = Zuiderzeeland["gemaal"]["KGMFUNC"].astype(str) +Zuiderzeeland["gemaal"].loc[Zuiderzeeland["gemaal"]["functiegemaal"] == 
"onbekend", "functiegemaal"] = ( + np.nan +) # replace onbekend with nan, will be filled up later see one line below +Zuiderzeeland["gemaal"]["functiegemaal"].fillna( + Zuiderzeeland["gemaal"]["KGMSOORT"], inplace=True +) # some additional is given in this column + +Zuiderzeeland["gemaal"].loc[ + Zuiderzeeland["gemaal"].functiegemaal.str.contains("af-|afvoer|onderbemaling"), "func_afvoer" +] = True +Zuiderzeeland["gemaal"].loc[ + Zuiderzeeland["gemaal"].functiegemaal.str.contains("aanvoergemaal|opmaling"), "func_aanvoer" +] = True +Zuiderzeeland["gemaal"].loc[Zuiderzeeland["gemaal"].functiegemaal.str.contains("circulatie"), "func_circulatie"] = True +Zuiderzeeland["gemaal"].loc[ + (Zuiderzeeland["gemaal"].func_afvoer is False) + & (Zuiderzeeland["gemaal"].func_aanvoer is False) + & (Zuiderzeeland["gemaal"].func_circulatie is False), + "func_afvoer", +] = True # set to afvoergemaal is there the function is unknown + + +# Gemaal +Zuiderzeeland["gemaal"] = Zuiderzeeland["gemaal"][ + ["KGMIDENT", "GLOBALID", "func_aanvoer", "func_afvoer", "func_circulatie", "geometry"] +] +Zuiderzeeland["gemaal"] = Zuiderzeeland["gemaal"].rename(columns={"KGMIDENT": "code", "GLOBALID": "globalid"}) +Zuiderzeeland["gemaal"]["nen3610id"] = "dummy_nen3610id_gemaal_" + Zuiderzeeland["gemaal"].index.astype(str) + +# Hydroobject +Zuiderzeeland["hydroobject"] = Zuiderzeeland["hydroobject"][["OWAIDENT", "GLOBALID", "geometry"]] +Zuiderzeeland["hydroobject"] = Zuiderzeeland["hydroobject"].rename(columns={"OWAIDENT": "code", "GLOBALID": "globalid"}) +Zuiderzeeland["hydroobject"]["nen3610id"] = "dummy_nen3610id_hydroobject_" + Zuiderzeeland["hydroobject"].index.astype( + str +) + +# duikersifonhevel +Zuiderzeeland["duikersifonhevel"] = Zuiderzeeland["duikersifonhevel"][["KDUIDENT", "GLOBALID", "geometry"]] +Zuiderzeeland["duikersifonhevel"] = Zuiderzeeland["duikersifonhevel"].rename( + columns={"KDUIDENT": "code", "GLOBALID": "globalid"} +) +Zuiderzeeland["duikersifonhevel"]["nen3610id"] = 
"dummy_nen3610id_duikersifonhevel_" + Zuiderzeeland[ + "duikersifonhevel" +].index.astype(str) + +# hevels +Zuiderzeeland["hevels"] = Zuiderzeeland["hevels"][["KWKIDENT", "GLOBALID", "geometry"]] +Zuiderzeeland["hevels"] = Zuiderzeeland["hevels"].rename(columns={"KWKIDENT": "code", "GLOBALID": "globalid"}) +Zuiderzeeland["hevels"]["nen3610id"] = "dummy_nen3610id_hevels_" + Zuiderzeeland["hevels"].index.astype(str) +# add to the duikersifonhevel +Zuiderzeeland["duikersifonhevel"] = gpd.GeoDataFrame( + pd.concat((Zuiderzeeland["duikersifonhevel"], Zuiderzeeland["hevels"])) +) + +# stuw +Zuiderzeeland["stuw"] = Zuiderzeeland["stuw"][["KWKIDENT", "GLOBALID", "geometry", "KWKSOORT"]] +Zuiderzeeland["stuw"] = Zuiderzeeland["stuw"].rename( + columns={"KWKIDENT": "code", "GLOBALID": "globalid", "KWKSOORT": "KWKsoort"} +) +Zuiderzeeland["stuw"] = Zuiderzeeland["stuw"].set_crs("EPSG:28992") +Zuiderzeeland["stuw"]["nen3610id"] = "dummy_nen3610id_stuw_" + Zuiderzeeland["stuw"].index.astype(str) + +# afsluitmiddel +Zuiderzeeland["afsluitmiddel"] = Zuiderzeeland["afsluitmiddel"][["KWKIDENT", "GLOBALID", "geometry"]] +Zuiderzeeland["afsluitmiddel"] = Zuiderzeeland["afsluitmiddel"].rename( + columns={"KWKIDENT": "code", "GLOBALID": "globalid"} +) +Zuiderzeeland["afsluitmiddel"]["nen3610id"] = "dummy_nen3610id_hevels_" + Zuiderzeeland["afsluitmiddel"].index.astype( + str +) + +# peilgebied +Zuiderzeeland["peilgebied"] = Zuiderzeeland["peilgebied"][["DHYDRO_ZMRPL", "GPGIDENT", "geometry"]] +Zuiderzeeland["peilgebied"]["nen3610id"] = "dummy_nen3610id_peilgebied_" + Zuiderzeeland["peilgebied"].index.astype(str) +Zuiderzeeland["peilgebied"]["globalid"] = "dummy_globalid_peilgebied_" + Zuiderzeeland["peilgebied"].index.astype(str) +Zuiderzeeland["peilgebied"] = Zuiderzeeland["peilgebied"].rename( + columns={"DHYDRO_ZMRPL": "streefpeil", "GPGIDENT": "code"} +) +Zuiderzeeland["peilgebied"]["globalid"] = "dummy_globalid_peilgebied_" + Zuiderzeeland["peilgebied"].index.astype(str) + +# 
streefpeil +Zuiderzeeland["streefpeil"] = Zuiderzeeland["peilgebied"][["streefpeil", "globalid"]] +Zuiderzeeland["streefpeil"]["geometry"] = np.nan +Zuiderzeeland["streefpeil"].rename(columns={"streefpeil": "waterhoogte"}, inplace=True) +Zuiderzeeland["streefpeil"] = gpd.GeoDataFrame(Zuiderzeeland["streefpeil"], geometry="geometry") + +# delete the streefpeil in the peilgebied for consistency +Zuiderzeeland["peilgebied"] = Zuiderzeeland["peilgebied"][["code", "globalid", "nen3610id", "geometry"]] + + +# ### Check for the correct keys and columns + + +show_layers_and_columns(waterschap=Zuiderzeeland) + + +# ### Store data + + +# Check if the directory exists. If it doesn't exist, create it + +if not os.path.exists(output_gpkg_path): + os.makedirs(output_gpkg_path) + +store_data(waterschap=Zuiderzeeland, output_gpkg_path=output_gpkg_path + "/Zuiderzeeland") From 13bc125e895b0a3f1cb51a28ef85218bf3d131e2 Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Mon, 21 Oct 2024 15:48:57 +0200 Subject: [PATCH 18/23] Run post-process_agv.py (#172) --- .../crossings_to_ribasim.py | 14 +- .../general_functions.py | 12 +- .../postprocess_data/post-process_WSRL.py | 16 +- .../postprocess_data/post-process_agv.py | 115 +------ .../postprocess_data/post-process_delfland.py | 16 +- .../postprocess_data/post-process_rijnland.py | 16 +- .../post-process_wetterskip.py | 16 +- .../post-process_zuiderzeeland.py | 16 +- .../postprocess_data/post-processing_HD.py | 16 +- .../postprocess_data/post-processing_HHNK.py | 16 +- .../postprocess_data/post-processing_HHSK.py | 16 +- .../post-processing_scheldestromen.py | 16 +- .../preprocess_data/AmstelGooienVecht.py | 8 +- .../preprocess_data/Delfland.py | 7 +- .../preprocess_data/HHNK.py | 20 +- .../preprocess_data/HHSK.py | 25 +- .../preprocess_data/Hollandse_Delta.py | 15 +- .../preprocess_data/Rijnland.py | 18 +- .../preprocess_data/Rivierenland.py | 13 +- .../preprocess_data/Scheldestromen.py | 14 +- .../preprocess_data/Wetterskip.py | 9 +- 
.../preprocess_data/Zuiderzeeland.py | 3 +- .../preprocess_data/general_functions.py | 319 ------------------ src/peilbeheerst_model/pyproject.toml | 2 +- 24 files changed, 85 insertions(+), 653 deletions(-) rename src/peilbeheerst_model/peilbeheerst_model/{postprocess_data => }/general_functions.py (97%) delete mode 100644 src/peilbeheerst_model/peilbeheerst_model/preprocess_data/general_functions.py diff --git a/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py b/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py index 2753b81..fe4d598 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py +++ b/src/peilbeheerst_model/peilbeheerst_model/crossings_to_ribasim.py @@ -45,7 +45,6 @@ def __init__(self, model_characteristics): def read_gpkg_layers( self, - variables=["hydroobject", "gemaal", "stuw", "peilgebied", "streefpeil", "duikersifonhevel"], print_var=False, data=None, ): @@ -53,9 +52,6 @@ def read_gpkg_layers( Parameters ---------- - variables : list, optional - List of layer names to be read from the GeoPackage, by default - ["hydroobject", "gemaal", "stuw", "peilgebied", "streefpeil", "aggregation_area", 'duikersifonhevel'] print_var : bool, optional Flag to print each layer name when reading, by default False data : _type_, optional @@ -69,11 +65,13 @@ def read_gpkg_layers( """ if data is None: data = {} - for variable in variables: + gpkg_path = self.model_characteristics["path_postprocessed_data"] + layers = gpd.list_layers(gpkg_path) + for layer in layers.name: if print_var: - print(variable) - data_temp = gpd.read_file(self.model_characteristics["path_postprocessed_data"], layer=variable) - data[variable] = data_temp + print(layer) + data_temp = gpd.read_file(gpkg_path, layer=layer) + data[layer] = data_temp return data diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/general_functions.py b/src/peilbeheerst_model/peilbeheerst_model/general_functions.py similarity index 97% rename 
from src/peilbeheerst_model/peilbeheerst_model/postprocess_data/general_functions.py rename to src/peilbeheerst_model/peilbeheerst_model/general_functions.py index d12a988..ca7fff6 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/general_functions.py +++ b/src/peilbeheerst_model/peilbeheerst_model/general_functions.py @@ -4,14 +4,13 @@ import pandas as pd -def read_gpkg_layers(gpkg_path, variables, engine="fiona", print_var=False): +def read_gpkg_layers(gpkg_path, engine="fiona", print_var=False): """ Read specified layers from a GeoPackage (GPKG) file and return them as a dictionary. Parameters ---------- gpkg_path (str): The file path to the GeoPackage (GPKG) file to read from. - variables (list): A list of layer names to read from the GeoPackage. print_var (bool, optional): If True, print the name of each variable as it is read. Default is False. Returns @@ -22,11 +21,12 @@ def read_gpkg_layers(gpkg_path, variables, engine="fiona", print_var=False): choose to print the names of variables as they are read by setting `print_var` to True. 
""" data = {} - for variable in variables: + layers = gpd.list_layers(gpkg_path) + for layer in layers.name: if print_var: - print(variable) - data_temp = gpd.read_file(gpkg_path, layer=variable, engine=engine) - data[variable] = data_temp + print(layer) + data_temp = gpd.read_file(gpkg_path, layer=layer, engine=engine) + data[layer] = data_temp return data diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py index 5474c02..6031b7d 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_WSRL.py @@ -8,7 +8,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -35,18 +36,7 @@ # Load HHNK files -WSRL = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +WSRL = read_gpkg_layers(gpkg_path=data_path) WSRL["peilgebied"] = WSRL["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py index ea285ed..d2fea1a 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_agv.py @@ -9,8 +9,8 @@ import geopandas as gpd import numpy as np import pandas as pd -from general_functions import read_gpkg_layers +from peilbeheerst_model.general_functions import read_gpkg_layers from ribasim_nl import CloudStorage # %% @@ -23,9 +23,8 @@ # %% cloud = CloudStorage() cloud.download_verwerkt(waterschap) - -# cloud.download_basisgegevens() -# 
cloud.download_aangeleverd("Rijkswaterstaat") +cloud.download_verwerkt("Rijkswaterstaat") +cloud.download_basisgegevens() # %% verwerkt_dir = cloud.joinpath(waterschap, "verwerkt") @@ -37,27 +36,9 @@ # Hoofdwatersysteem boundaries hws_path = cloud.joinpath("Rijkswaterstaat/verwerkt/krw_basins_vlakken.gpkg") -# Buffer boundaries -buffer_path = cloud.joinpath("Rijkswaterstaat/verwerkt/hws_buffer_agv.gpkg") - -# Buffer RWHS -rhws_path = cloud.joinpath("Rijkswaterstaat/verwerkt/agv_rhws_buffer.gpkg") - - # %% Load Files # Load HHNK files -AVG = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +AVG = read_gpkg_layers(gpkg_path=data_path) AVG["peilgebied"] = AVG["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries @@ -66,18 +47,8 @@ # Load hws gdf_hws = gpd.read_file(hws_path) -# Load buffer -gdf_buffer = gpd.read_file(buffer_path) -gdf_buffer = gdf_buffer.to_crs("EPSG:28992") -gdf_buffer = gdf_buffer.dissolve() - -# Load rhws -# gdf_rhws = gpd.read_file(rhws_path) -# gdf_rhws = gdf_rhws.to_crs('EPSG:28992') -# gdf_rhws = gdf_rhws.dissolve() - # %% -AVG["peilgebied"].globalid.is_unique +assert AVG["peilgebied"].globalid.is_unique # Select waterschap boundaries and clip hws layer @@ -89,9 +60,6 @@ # Use waterschap boundaries to clip HWS layer gdf_hws = gpd.overlay(gdf_grens, gdf_hws, how="intersection") -# Use waterschap boundaries to clip HWS layer -# gdf_rhws = gpd.overlay(gdf_grens, gdf_rhws, how='intersection') - # Peilgebied and HWS layer overlap: # 1. Identify the overlapping areas # 2. 
Clip @@ -122,10 +90,8 @@ peilgebieden_cat = [] for index, row in AVG["peilgebied"].iterrows(): - # if row.code == "Oosterpark" or row.code == "Vechtboezem": if "Oosterpark" in row.code or "Vechtboezem" in row.code or "Stadsboezem Amsterdam" in row.code: - print("true") - + print(f"true {row.code}") peilgebieden_cat.append(1) else: peilgebieden_cat.append(0) @@ -133,31 +99,7 @@ # Add new column and drop old HWS_BZM column AVG["peilgebied"]["peilgebied_cat"] = peilgebieden_cat -# %% Add rhws to ['peilgebied','streefpeil'] -# update peilgebied dict key -# gdf_rhws['globalid'] = 'dummy_globalid_rhws_' + gdf_rhws.index.astype(str) -# gdf_rhws['code'] = 'dummy_code_nhws_' + gdf_rhws.index.astype(str) -# gdf_rhws['nen3610id'] = 'dummy_nen3610id_rhws_' + gdf_rhws.index.astype(str) -# gdf_rhws['peilgebied_cat'] = 1 - -# gdf_rhws = gdf_rhws[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] - -# AVG['peilgebied'] = pd.concat([gdf_rhws, AVG['peilgebied']]) - -# %% -# # Create boezem streefpeil layer -# streefpeil_hws = pd.DataFrame() -# streefpeil_hws['waterhoogte'] = [np.nan] * len(gdf_rhws) -# streefpeil_hws['globalid'] = 'dummy_globalid_rhws_' + gdf_rhws.index.astype(str) -# streefpeil_hws['geometry'] = [None]* len(gdf_rhws) - -# AVG['streefpeil'] = pd.concat([streefpeil_hws, AVG['streefpeil']]) -# AVG['streefpeil'] = gpd.GeoDataFrame(AVG['streefpeil']) - -# Add nhws to ['peilgebied','streefpeil'] - -# %% -# update peilgebied dict key +# %% update peilgebied dict key gdf_hws["globalid"] = "dummy_globalid_nhws_" + gdf_hws.index.astype(str) gdf_hws["code"] = "dummy_code_nhws_" + gdf_hws.index.astype(str) gdf_hws["nen3610id"] = "dummy_nen3610id_nhws_" + gdf_hws.index.astype(str) @@ -167,8 +109,7 @@ AVG["peilgebied"] = pd.concat([gdf_hws, AVG["peilgebied"]]) -# %% -# Create boezem streefpeil layer +# %% Create boezem streefpeil layer streefpeil_hws = pd.DataFrame() streefpeil_hws["waterhoogte"] = [np.nan] * len(gdf_hws) streefpeil_hws["globalid"] = 
"dummy_globalid_nhws_" + gdf_hws.index.astype(str) @@ -177,44 +118,6 @@ AVG["streefpeil"] = pd.concat([streefpeil_hws, AVG["streefpeil"]]) AVG["streefpeil"] = gpd.GeoDataFrame(AVG["streefpeil"]) -# %% Create buffer polygon between NHWS and peilgebied/RHWS -# buffer_polygon = gdf_buffer.geometry.iat[0].intersection(gdf_grens.geometry.iat[0]) -# buffer_polygon = buffer_polygon.difference(shapely.geometry.MultiPolygon(gdf_hws.geometry.tolist())) -# buffer_polygon = buffer_polygon.difference(shapely.ops.unary_union(AVG['peilgebied'].geometry.tolist())) - -# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) -# buffer_polygon = buffer_polygon.set_geometry(0) -# buffer_polygon = buffer_polygon.dissolve() -# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) -# buffer_polygon = buffer_polygon.set_geometry('geometry') -# buffer_polygon = buffer_polygon.set_crs('EPSG:28992') - - -# %% Add buffer to ['peilgebied','streefpeil'] - -# update peilgebied dict key -# buffer_polygon = gpd.GeoDataFrame(buffer_polygon) -# buffer_polygon['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) -# buffer_polygon['code'] = 'dummy_code_nhws_buffer_' + buffer_polygon.index.astype(str) -# buffer_polygon['nen3610id'] = 'dummy_nen3610id_nhws_buffer_' + buffer_polygon.index.astype(str) -# buffer_polygon['peilgebied_cat'] = 2 -# buffer_polygon = buffer_polygon.rename(columns={0:'geometry'}) -# buffer_polygon = buffer_polygon[['globalid', 'code', 'nen3610id', 'peilgebied_cat', 'geometry']] - -# AVG['peilgebied'] = pd.concat([buffer_polygon, AVG['peilgebied']]) -# AVG['peilgebied'] = gpd.GeoDataFrame(AVG['peilgebied']) - -# %% -# # Create boezem streefpeil layer -# streefpeil_buffer = pd.DataFrame() -# streefpeil_buffer['waterhoogte'] = [np.nan] -# streefpeil_buffer['globalid'] = 'dummy_globalid_nhws_buffer_' + buffer_polygon.index.astype(str) -# streefpeil_buffer['geometry'] = [None] - - -# AVG['streefpeil'] = pd.concat([streefpeil_buffer, AVG['streefpeil']]) -# 
AVG['streefpeil'] = gpd.GeoDataFrame(AVG['streefpeil']) - # %% if remove_cat_2: AVG["peilgebied"] = AVG["peilgebied"].loc[AVG["peilgebied"].peilgebied_cat != 2] @@ -227,6 +130,6 @@ print(key) AVG[str(key)].to_file(output_gpkg_path, layer=str(key), driver="GPKG") -cloud.upload_verwerkt(output_gpkg_path) +cloud.upload_verwerkt(waterschap) # %% AVG["peilgebied"]["peilgebied_cat"].unique() diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py index fe82a3f..51a949d 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_delfland.py @@ -9,7 +9,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -36,18 +37,7 @@ # Load HHNK files -delfland = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +delfland = read_gpkg_layers(gpkg_path=data_path) delfland["peilgebied"] = delfland["peilgebied"].to_crs("EPSG:28992") diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py index 789cd48..d11963e 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_rijnland.py @@ -10,7 +10,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -37,18 +38,7 @@ # Load HHNK files -Rijnland = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - 
"duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +Rijnland = read_gpkg_layers(gpkg_path=data_path) Rijnland["peilgebied"] = Rijnland["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py index 4eab36f..1e66262 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_wetterskip.py @@ -8,7 +8,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -36,18 +37,7 @@ # Load HHNK files -Wetterskip = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +Wetterskip = read_gpkg_layers(gpkg_path=data_path) Wetterskip["peilgebied"] = Wetterskip["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py index cc3577d..ad9ed77 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-process_zuiderzeeland.py @@ -8,7 +8,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -35,18 +36,7 @@ # Load HHNK files -Zuiderzeeland = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) 
+Zuiderzeeland = read_gpkg_layers(gpkg_path=data_path) Zuiderzeeland["peilgebied"] = Zuiderzeeland["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py index ec46b06..fe3c2c4 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HD.py @@ -8,7 +8,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -36,18 +37,7 @@ # Load HHNK files -HD = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +HD = read_gpkg_layers(gpkg_path=data_path) HD["peilgebied"] = HD["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py index 29eecc7..90395ae 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHNK.py @@ -8,7 +8,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -36,18 +37,7 @@ # Load HHNK files -HHNK = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +HHNK = read_gpkg_layers(gpkg_path=data_path) HHNK["peilgebied"] = HHNK["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git 
a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py index 23bbcdd..1089af1 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_HHSK.py @@ -9,7 +9,8 @@ import geopandas as gpd import shapely -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -35,18 +36,7 @@ # Load HHNK files -HHSK = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +HHSK = read_gpkg_layers(gpkg_path=data_path) HHSK["peilgebied"] = HHSK["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py index 55f65b7..b45b4dd 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py +++ b/src/peilbeheerst_model/peilbeheerst_model/postprocess_data/post-processing_scheldestromen.py @@ -8,7 +8,8 @@ import geopandas as gpd import numpy as np -from general_functions import * + +from peilbeheerst_model.general_functions import * remove_cat_2 = True @@ -36,18 +37,7 @@ # Load HHNK files -Scheldestromen = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuw", - "gemaal", - "hydroobject", - "duikersifonhevel", - "peilgebied", - "streefpeil", - "aggregation_area", - ], -) +Scheldestromen = read_gpkg_layers(gpkg_path=data_path) Scheldestromen["peilgebied"] = Scheldestromen["peilgebied"].to_crs("EPSG:28992") # Load waterschap boundaries diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py 
b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py index 482d593..db8ad3d 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/AmstelGooienVecht.py @@ -1,9 +1,9 @@ import geopandas as gpd import numpy as np import pandas as pd -from general_functions import show_layers_and_columns, store_data from shapely import wkt +from peilbeheerst_model.general_functions import show_layers_and_columns, store_data from ribasim_nl import CloudStorage pd.set_option("display.max_columns", None) @@ -177,6 +177,12 @@ AVG["hydroobject"] = AVG["hydroobject"].drop_duplicates(subset="globalid") # in case it is run multiple times AVG["hydroobject"] = gpd.GeoDataFrame(AVG["hydroobject"]).set_crs("epsg:28992") +# aggregation_area +AVG["aggregation_area"] = AVG["peilgebied"].copy() +AVG["aggregation_area"]["globalid"] = "dummy_globalid_agg_area_" + AVG["aggregation_area"].index.astype(str) +AVG["aggregation_area"]["code"] = ( + AVG["aggregation_area"]["code"] + "_dummy_id_" + AVG["aggregation_area"].index.astype(str) +) # # Control, store diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py index 653f493..7f7ba50 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Delfland.py @@ -3,7 +3,8 @@ import geopandas as gpd import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * # # Delfland @@ -14,9 +15,7 @@ output_gpkg_path = "../../Data_postprocessed/Waterschappen/Delfland" -Delfland = read_gpkg_layers( - gpkg_path=gdb_path, variables=["stuw", "gemaal", "watergang", "duikersifonhevel", "peilgebiedpraktijk", "keerschot"] -) +Delfland = read_gpkg_layers(gpkg_path=gdb_path) # 'peilafwijkinggebied', # 'pomp']) # 'streefpeil']) 
diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py index afe2333..0ddb13d 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHNK.py @@ -2,7 +2,8 @@ import geopandas as gpd import numpy as np import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * # # Hollands Noorderkwartier @@ -16,24 +17,11 @@ # retrieve the data -HHNK = read_gpkg_layers( - gpkg_path=gpkg_path_HHNK, - variables=[ - # 'stuw', #nalevering - # 'gemaal', #nalevering - "afsluitmiddel", - "hydroobject", - "duikersifonhevel", - ], -) -# 'peilafwijkinggebied', -# 'peilgebiedpraktijk', -# 'pomp']) -# 'streefpeil']) +HHNK = read_gpkg_layers(gpkg_path=gpkg_path_HHNK) # retrieve data from a gdb, as the gpkg of HHNK does not contain all relevant data data_gdb = gpd.read_file(gdb_path_HHNK, layer="BWN_ruimtekaart") -HHNK_nalevering = read_gpkg_layers(gpkg_path=gdb_path_HHNK_nalevering, variables=["stuw", "gemaal"]) # nalevering +HHNK_nalevering = read_gpkg_layers(gpkg_path=gdb_path_HHNK_nalevering) # nalevering HHNK["stuw"] = HHNK_nalevering["stuw"] HHNK["gemaal"] = HHNK_nalevering["gemaal"] diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py index 55bec86..6b8a91a 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/HHSK.py @@ -4,7 +4,8 @@ import geopandas as gpd import numpy as np import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * pd.set_option("display.max_columns", None) @@ -15,32 +16,16 @@ output_gpkg_path = "../../Data_postprocessed/Waterschappen/HHSK" -HHSK = read_gpkg_layers( - gpkg_path=path_HHSK, - variables=[ - "stuw", - 
"gemaal", - "afsluitmiddel", - "duikersifonhevel", - "hydroobject", - # 'peilgebiedvigerend', - # 'peilafwijkinggebied', - # 'peilbesluitgebied', - "streefpeil", - ], - engine="pyogrio", -) +HHSK = read_gpkg_layers(gpkg_path=path_HHSK, engine="pyogrio") HHSK_nalevering = read_gpkg_layers( - gpkg_path=r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\HHSK\LHM_hhsk_nalevering.gpkg", - variables=["Persleiding"], + gpkg_path=r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\HHSK\LHM_hhsk_nalevering.gpkg" ) HHSK_2nalevering = read_gpkg_layers( - gpkg_path=r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\HHSK\hhsklevering_07032024.gpkg", - variables=["PeilgebiedPraktijk", "VigerendPeilgebiedZPP"], + gpkg_path=r"D:\Users\Bruijns\Documents\PR4750_20\Data_preprocessed\Waterschappen\HHSK\hhsklevering_07032024.gpkg" ) diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py index 567ed70..55f7664 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Hollandse_Delta.py @@ -4,7 +4,8 @@ import geopandas as gpd import numpy as np import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * # # HD @@ -15,17 +16,7 @@ output_gpkg_path = "../../Data_postprocessed/Waterschappen/Hollandse_Delta" -HD = read_gpkg_layers( - gpkg_path=data_path, - variables=[ - "stuwen", - "gemalen", - "afsluitmiddelen", - "sluizen", - "HydroObjectWatergangtype", # = hydroobject - "HydroObjectKunstwerkvakken", - ], -) # = duikersifonhevel +HD = read_gpkg_layers(gpkg_path=data_path) # change names HD["stuw"] = HD.pop("stuwen") diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py index 
ef8b895..89fc438 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rijnland.py @@ -5,7 +5,8 @@ import matplotlib.pyplot as plt import numpy as np import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * pd.set_option("display.max_columns", None) @@ -16,20 +17,7 @@ output_gpkg_path = "../../Data_postprocessed/Waterschappen/Rijnland" -Rijnland = read_gpkg_layers( - gpkg_path=path_Rijnland, - variables=[ - "stuw", - "gemaal", - "afsluitmiddel", - "duikersifonhevel", - "hydroobject", - "peilgebiedvigerend", - "peilgebiedpraktijk", - "peilafwijkinggebied", - "streefpeil", - ], -) +Rijnland = read_gpkg_layers(gpkg_path=path_Rijnland) # # Rijnland diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py index b7eb5a2..1a91055 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Rivierenland.py @@ -5,7 +5,8 @@ import matplotlib.pyplot as plt import numpy as np import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * pd.set_option("display.max_columns", None) @@ -28,13 +29,9 @@ gpkg_path = os.path.join(root, file) if WSRL == {}: - WSRL = read_gpkg_layers( - gpkg_path=gpkg_path, variables=["Stuw", "Gemaal", "Hydroobject", "Duikersifonhevel"] - ) + WSRL = read_gpkg_layers(gpkg_path=gpkg_path) else: - temp_WSRL = read_gpkg_layers( - gpkg_path=gpkg_path, variables=["Stuw", "Gemaal", "Hydroobject", "Duikersifonhevel"] - ) + temp_WSRL = read_gpkg_layers(gpkg_path=gpkg_path) for variable in WSRL.keys(): WSRL[variable] = pd.concat([WSRL[variable], temp_WSRL[variable]]).reset_index(drop=True) @@ -43,7 +40,7 @@ gdb_path = r"..\..\Data_preprocessed\Waterschappen\WSRL\OverigeGegevens.gdb" 
-WSRL_gdb = read_gpkg_layers(gpkg_path=gdb_path, variables=["PeilgebiedenPraktijk"]) +WSRL_gdb = read_gpkg_layers(gpkg_path=gdb_path) # add the gdb to the dict # WSRL['peilgebiedafwijking'] = WSRL_gdb['Peilafwijkingen'] diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py index 6ccca61..528db26 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Scheldestromen.py @@ -4,7 +4,8 @@ import geopandas as gpd import numpy as np import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * pd.set_option("display.max_columns", None) @@ -17,17 +18,6 @@ Scheldestromen = read_gpkg_layers( gpkg_path=path_Scheldestromen, - variables=[ - "stuw", - "gemaal", - # 'afsluitmiddel', - "duikersifonhevel", - "hydroobject", - # 'peilgebiedvigerend', - # 'peilgebiedpraktijk', - # 'peilafwijkinggebied', - # 'streefpeil', - ], engine="pyogrio", ) diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py index ab07e73..8f155ee 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Wetterskip.py @@ -1,7 +1,8 @@ # import packages and functions import geopandas as gpd import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * pd.set_option("display.max_columns", None) @@ -23,12 +24,6 @@ # retrieve the data Wetterskip = read_gpkg_layers( gpkg_path=gpkg_path_Wetterskip, - variables=["stuw", "gemaal", "afsluitmiddel", "hydroobject", "duikersifonhevel"], - # 'peilmerk', - # 'streefpeil', - # 'peilgebiedpraktijk', - # 'peilgebiedvigerend', - # 'peilbesluitgebied'], print_var=False, ) diff --git 
a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py index a41fc62..54a8d3e 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py +++ b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/Zuiderzeeland.py @@ -5,7 +5,8 @@ import geopandas as gpd import numpy as np import pandas as pd -from general_functions import * + +from peilbeheerst_model.general_functions import * pd.set_option("display.max_columns", None) diff --git a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/general_functions.py b/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/general_functions.py deleted file mode 100644 index 0e40153..0000000 --- a/src/peilbeheerst_model/peilbeheerst_model/preprocess_data/general_functions.py +++ /dev/null @@ -1,319 +0,0 @@ -# import packages and functions - -import geopandas as gpd -import matplotlib.pyplot as plt -import pandas as pd - - -def read_gpkg_layers(gpkg_path, variables, engine="fiona", print_var=False): - """ - Read specified layers from a GeoPackage (GPKG) file and return them as a dictionary. - - Parameters - ---------- - gpkg_path (str): The file path to the GeoPackage (GPKG) file to read from. - variables (list): A list of layer names to read from the GeoPackage. - print_var (bool, optional): If True, print the name of each variable as it is read. Default is False. - - Returns - ------- - dict: A dictionary containing the GeoDataFrames, with layer names as keys. - - This function reads specified layers from a GeoPackage (GPKG) file and returns them as a dictionary. You can - choose to print the names of variables as they are read by setting `print_var` to True. 
- """ - data = {} - for variable in variables: - if print_var: - print(variable) - data_temp = gpd.read_file(gpkg_path, layer=variable, engine=engine) - data[variable] = data_temp - - return data - - -def show_layers_and_columns(waterschap): - """ - Display Information About Layers and Columns in a Geospatial Dataset. - - Parameters - ---------- - waterschap (dict): A dictionary containing geospatial datasets as GeoDataFrames. - - Returns - ------- - None - - This function prints the names of all layers and the columns within each layer of a geospatial dataset stored - in a dictionary. - - """ - for key in waterschap.keys(): - print(key) - print(waterschap[str(key)].columns.values) - print("type = ", type(waterschap[str(key)])) - print("crs = ", waterschap[str(key)].crs) - print() - - -def store_data(waterschap, output_gpkg_path): - """ - Store Geospatial Data to a GeoPackage (GPKG) File. - - Parameters - ---------- - waterschap (dict): A dictionary containing GeoDataFrames to be stored in the GPKG file. - output_gpkg_path (str): The file path (including the file name without extension) to save the GPKG file. - - Returns - ------- - None - - This function stores geospatial data from a dictionary of GeoDataFrames into a GeoPackage (GPKG) file. - - Parameters - ---------- - - waterschap: A dictionary where the keys represent layer names, and the values are GeoDataFrames. - - output_gpkg_path: The file path for the output GPKG file. The '.gpkg' extension is added automatically. - """ - for key in waterschap.keys(): - waterschap[str(key)].to_file(output_gpkg_path + ".gpkg", layer=str(key), driver="GPKG") - - -def overlapping_peilgebieden(waterschap_peilgebieden): - """ - Identify and calculate the percentage of overlapping peilgebieden. - - Parameters - ---------- - waterschap_peilgebieden (geopandas.GeoDataFrame): A GeoDataFrame containing polygons (the peilgebieden). 
- - Returns - ------- - geopandas.GeoDataFrame: A GeoDataFrame with overlapping polygons and their overlap percentages. - - This function analyzes a GeoDataFrame of peilgebied polygons to find overlapping polygons and calculate - the percentage of overlap between them. It returns a GeoDataFrame with information about the overlapping - polygons, including their overlap percentages. - - Parameters - ---------- - - waterschap_peilgebieden: A GeoDataFrame containing the peilgebieden polygons. - """ - peilgebied = waterschap_peilgebieden - peilgebied.geometry = peilgebied.buffer(distance=0) # make invalid geometries valid - peilgebied.set_crs(crs="EPSG:28992", inplace=True) - - # Create an empty GeoDataFrame to store the overlapping polygons and additional information - overlapping_polygons = gpd.GeoDataFrame(columns=peilgebied.columns) - - # Iterate through each polygon in peilgebied - for index, row in peilgebied.iterrows(): - current_polygon = peilgebied.iloc[[index]] # select the current polygon - other_polygons = peilgebied.drop(index) # create a GeoDataFrame without the current polygon - overlaps = other_polygons[ - other_polygons.geometry.overlaps(current_polygon.geometry.iloc[0]) - ] # check for overlaps with other polygons - - if not overlaps.empty: - # calculate the percentage of overlap, and add this to the gdf including the overlapping indexes - current_overlap_percentage = ( - overlaps.geometry.intersection(current_polygon.geometry.iloc[0]).area - / current_polygon.geometry.iloc[0].area - * 100 - ) - - overlaps["overlap_percentage"], overlaps["source_globalid"] = pd.NA, pd.NA # create columns - - # fill columns - overlaps["overlap_percentage"] = ( - current_overlap_percentage # multiple peilgebieden will be added to the temporal gdf if there are multiple overlapping polygons - ) - overlaps["source_globalid"] = current_polygon["globalid"].values[ - 0 - ] # add the global id of the current polygon. 
- - # add to the results - overlapping_polygons = pd.concat([overlapping_polygons, overlaps]) - - return overlapping_polygons - - -def plot_histogram_overlap(overlapping_polygons): - """ - Plots a histogram of the overlapping polygons in a DataFrame. - - Parameters - ---------- - overlapping_polygons (pd.DataFrame): A DataFrame containing information about overlapping polygons. - It should have a 'overlap_percentage' column to represent the percentage of overlap between polygons. - - Returns - ------- - None - - The function calculates a histogram of overlapping percentages, providing insights into the distribution of overlaps - between polygons. It handles potential NaN values in the 'overlap_percentage' column and creates bins ranging - from 0% to 100% in 10% increments for the histogram. The number of overlapping polygons is displayed in the title. - - """ - overlapping_polygons["overlap_percentage"] = overlapping_polygons["overlap_percentage"].fillna( - 0 - ) # Handle potential NaN values - bins = range(0, 101, 10) # Create bins from 0% to 100% in 10% increments - - # Create the histogram - plt.hist(overlapping_polygons["overlap_percentage"], bins=bins, color="cornflowerblue", edgecolor="k") - - # Set labels and title - plt.xlabel("Overlap [%]") - plt.ylabel("Frequency [#]") # Update the y-axis label - # plt.yscale('log') # Set the y-axis scale to 'log' - plt.ylim(0, 15) - plt.suptitle("Histogram of overlapping percentages") - plt.title(f"Number of overlapping polygons = {len(overlapping_polygons)}", fontsize=8) - plt.show() - - -def plot_overlapping_peilgebieden(peilgebied, overlapping_polygons, minimum_percentage): - """ - Plot Overlapping Peilgebieden on a map, including a Minimum Percentage of Overlap to show. - - Parameters - ---------- - peilgebied (geopandas.GeoDataFrame): A GeoDataFrame representing the peilgebied polygons. - overlapping_polygons (geopandas.GeoDataFrame): A GeoDataFrame containing information about overlapping polygons/peilgebieden. 
- minimum_percentage (float or int): The minimum overlap percentage required for polygons to be displayed. - - Returns - ------- - None - - This function creates a plot to visualize overlapping peilgebieden based on a specified minimum overlap percentage. - It displays a subset of overlapping polygons with a percentage greater than the specified minimum. - - Parameters - ---------- - - peilgebied: The entire peilgebieden GeoDataFrame serving as the background. - - overlapping_polygons: GeoDataFrame containing information about overlapping polygons. - - minimum_percentage: The minimum overlap percentage required for polygons to be displayed. - - """ - # make a subsect of overlapping polygons, based on a percentage - overlap_subset = overlapping_polygons.loc[overlapping_polygons["overlap_percentage"] > minimum_percentage] - - # plot - fig, ax = plt.subplots() - peilgebied.plot(ax=ax, color="lightgray") # background - overlap_subset.plot( - ax=ax, cmap="coolwarm", column=overlap_subset.overlap_percentage, label="Percentage of overlap", legend=True - ) - - plt.show() - - -# def intersect_using_spatial_index(peilgebied_praktijk, peilgebied_afwijking, check): -# """ -# Conduct spatial intersection using spatial index for candidates GeoDataFrame to make queries faster. -# Note, with this function, you can have multiple Polygons in the 'intersecting_gdf' and it will return all the points -# intersect with ANY of those geometries. 
-# """ -# peilgebied_praktijk_sindex = peilgebied_praktijk.sindex -# possible_matches_index = [] - -# # 'itertuples()' function is a faster version of 'iterrows()' -# for other in peilgebied_afwijking.itertuples(): -# bounds = other.geometry.bounds -# c = list(peilgebied_praktijk_sindex.intersection(bounds)) -# possible_matches_index += c - -# # Get unique candidates -# unique_candidate_matches = list(set(possible_matches_index)) -# possible_matches = peilgebied_praktijk.iloc[unique_candidate_matches] - -# possible_matches.to_file('possible_matches_Rijnland.shp') -# un_un = possible_matches.intersects(peilgebied_afwijking.unary_union) -# # print('un_un =') -# # display(un_un) -# # print() - -# # print('possible_matches =') -# # display(possible_matches) -# # print() - -# # print('overlapping_pg_praktijk =') -# # display(possible_matches[un_un]) - -# # possible_matches[un_un].to_file('peilgebied_afwijking_unary_union_Rijnland.shp') - - -# # Conduct the actual intersect -# overlapping_pg_praktijk = possible_matches.loc[un_un] #the entire peilgebied praktijk polygons - - -# #remove the peilgebied afwijking from the peilgebied praktijk -# intersection = gpd.overlay(overlapping_pg_praktijk, peilgebied_afwijking, how='intersection') - -# #fix possible invalid geometries -# overlapping_pg_praktijk['geometry'] = overlapping_pg_praktijk.buffer(distance = 0) -# peilgebied_afwijking['geometry'] = peilgebied_afwijking.buffer(distance = 0) - -# overlapping_updated = gpd.overlay(peilgebied_praktijk, intersection, how='symmetric_difference') ##remove the difference between pg_praktijk and pg_afwijking -# peilgebied = overlapping_updated.append(intersection, ignore_index=True) #add the removed difference, but now only the intersected part of pg_afwijking - - -# if check: -# peilgebied_praktijk.to_file('Checks/Rivierenland/peilgebied_praktijk.gpkg', driver='GPKG') -# peilgebied_afwijking.to_file('Checks/Rivierenland/peilgebied_afwijking.gpkg', driver='GPKG') - -# 
intersection.to_file('Checks/Rivierenland/intersection.gpkg', driver='GPKG') -# overlapping_updated.to_file('Checks/Rivierenland/overlapping_updated.gpkg', driver='GPKG') -# peilgebied.to_file('Checks/Rivierenland/peilgebied.gpkg', driver='GPKG') - -# return peilgebied - - -def burn_in_peilgebieden(base_layer, overlay_layer, plot=True): - # remove the overlapping parts from the base_layer - base_layer_without_overlapping = gpd.overlay( - base_layer, overlay_layer, how="symmetric_difference", keep_geom_type=False - ) ##remove the difference between pg_praktijk and pg_afwijking - - # fill each column - base_layer_without_overlapping.code_1.fillna(value=base_layer_without_overlapping.code_2, inplace=True) - base_layer_without_overlapping.nen3610id_1.fillna(value=base_layer_without_overlapping.nen3610id_2, inplace=True) - base_layer_without_overlapping.globalid_1.fillna(value=base_layer_without_overlapping.globalid_2, inplace=True) - # base_layer_without_overlapping.waterhoogte_1.fillna(value = base_layer_without_overlapping.waterhoogte, inplace=True) - - if ( - "waterhoogte_1" in base_layer_without_overlapping.keys() - ): # sometimes a waterhoogte is present in the peilgebieden. Manage this. 
- base_layer_without_overlapping.rename( - columns={ - "code_1": "code", - "nen3610id_1": "nen3610id", - "globalid_1": "globalid", - "waterhoogte_1": "waterhoogte", - }, - inplace=True, - ) - base_layer_without_overlapping.drop( - columns=["code_2", "nen3610id_2", "globalid_2", "waterhoogte_2"], inplace=True - ) - - else: - base_layer_without_overlapping.rename( - columns={"code_1": "code", "nen3610id_1": "nen3610id", "globalid_1": "globalid"}, inplace=True - ) - base_layer_without_overlapping.drop(columns=["code_2", "nen3610id_2", "globalid_2"], inplace=True) - - burned_base_layer = pd.concat([pd.DataFrame(base_layer_without_overlapping), pd.DataFrame(overlay_layer)], axis=0) - - burned_base_layer = burned_base_layer.drop_duplicates(subset="globalid", keep="last") - - if plot: - fig, ax = plt.subplots() - base_layer.plot(ax=ax, color="cornflowerblue") - overlay_layer.plot(ax=ax, color="blue") - - return burned_base_layer diff --git a/src/peilbeheerst_model/pyproject.toml b/src/peilbeheerst_model/pyproject.toml index a2770e4..856c664 100644 --- a/src/peilbeheerst_model/pyproject.toml +++ b/src/peilbeheerst_model/pyproject.toml @@ -14,7 +14,7 @@ authors = [ license = { text = "MIT" } requires-python = ">=3.10" -dependencies = ["geopandas", "numpy", "pandas", "pydantic", "shapely", "tqdm"] +dependencies = ["geopandas", "numpy", "pandas", "pydantic", "shapely", "tqdm", "matplotlib"] dynamic = ["version"] [project.optional-dependencies] From 5fd9a4e53c85f923dc4a27d36f37306d39a42190 Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Mon, 21 Oct 2024 16:35:37 +0200 Subject: [PATCH 19/23] AGV reproducible parse_crossings (#173) This creates a new script `src\peilbeheerst_model\parse_crossings\AmstelGooienVecht.py` that is hooked up to thegoodcloud. I left the one with all waterboards intact. In the new script I didn't include the plotting code. 
--- docs/workflow/level-controlled.qmd | 18 ++++----- src/peilbeheerst_model/01_parse_crossings.py | 5 ++- .../01b_ad_krw_to_peilgebieden.py | 4 +- .../parse_crossings/AmstelGooienVecht.py | 38 +++++++++++++++++++ 4 files changed, 53 insertions(+), 12 deletions(-) create mode 100644 src/peilbeheerst_model/parse_crossings/AmstelGooienVecht.py diff --git a/docs/workflow/level-controlled.qmd b/docs/workflow/level-controlled.qmd index 50e58d4..552f76c 100644 --- a/docs/workflow/level-controlled.qmd +++ b/docs/workflow/level-controlled.qmd @@ -6,17 +6,17 @@ Workflow for deriving level controlled (peilbeheerst) regional models. All code can be found under [`src/peilbeheerst_model`](https://github.com/Deltares/Ribasim-NL/tree/main/src/peilbeheerst_model). The paths below are relative to this path. -1. Run the preprocessing notebooks. One notebook per water board, path: `peilbeheerst_model/preprocess_data/` -2. Run the postprocessing notebook. One notebook per water board, path: `peilbeheerst_model/postprocess_data/` -3. Run the crossings notebook. One notebook, path: `01_parse_crossings.ipynb` -4. Run shortest paths notebooks. One notebook per water board, path: `Shortest_path/` -5. Run crossings to Ribasim notebook. One notebook, all water boards are below each other, path: `02_crossings_to_ribasim_notebook.ipynb` -6. Run parametrize notebooks. One notebook per water board, for now only Amstel, Gooi en Vecht (AGV), path: `Parametrize/AmstelGooienVecht_parametrize.ipynb` +1. Run the preprocessing scripts. One script per water board, path: `peilbeheerst_model/preprocess_data/` +2. Run the postprocessing script. One script per water board, path: `peilbeheerst_model/postprocess_data/` +3. Run the crossings script. One script, path: `01_parse_crossings.ipynb`. Moving to one script per water board under `peilbeheerst_model/parse_crossings/`. +4. Run shortest paths scripts. One script per water board, path: `Shortest_path/` +5. Run crossings to Ribasim script. 
One script, all water boards are below each other, path: `02_crossings_to_ribasim_notebook.ipynb`. Moving to one script per water board under `peilbeheerst_model/crossings_to_ribasim/`. +6. Run parametrize scripts. One script per water board, for now only Amstel, Gooi en Vecht (AGV), path: `Parametrize/AmstelGooienVecht_parametrize.ipynb` -We originally had more parametrize notebooks, but because so much has changed I have now saved these in our backup. +We originally had more parametrize scripts, but because so much has changed I have now saved these in our backup. We will only use these to see if there were any additional manual adjustments. For the rest, it follows the same workflow as `AmstelGooienVecht_parametrize.ipynb`. -Finally: step 1 started with a clear notebook per water board. -During the process of 1.5 years, these notebooks have become increasingly larger and more confusing, whereby not every line is needed anymore. +Finally: step 1 started with a clear script per water board. +During the process of 1.5 years, these scripts have become increasingly larger and more confusing, whereby not every line is needed anymore. For now, there is no priority to clean this up, partly because this is a major risk that the data will (unintentionally) change, which will change the networks and the feedback forms can no longer be used. 
diff --git a/src/peilbeheerst_model/01_parse_crossings.py b/src/peilbeheerst_model/01_parse_crossings.py index e0ff7b2..54b23a7 100644 --- a/src/peilbeheerst_model/01_parse_crossings.py +++ b/src/peilbeheerst_model/01_parse_crossings.py @@ -25,8 +25,9 @@ for waterschap, waterschap_struct in waterschap_data.items(): print(f"\n{waterschap}...") - init_settings, crossing_settings = waterschap_struct.values() - init_settings["logfile"] = pathlib.Path(init_settings["output_path"]).with_suffix("").with_suffix(".log") + crossing_settings = waterschap_struct["find_crossings_with_peilgebieden"] + init_settings = waterschap_struct["init"] + init_settings["logfile"] = pathlib.Path(init_settings["output_path"]).with_suffix(".log") if waterschap not in ["HHNK"]: continue diff --git a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py index dbe31c1..17b827e 100644 --- a/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py +++ b/src/peilbeheerst_model/01b_ad_krw_to_peilgebieden.py @@ -21,7 +21,9 @@ for waterschap, waterschap_struct in waterschap_data.items(): print(f"\n{waterschap}...") - init_settings, crossing_settings = waterschap_struct.values() + crossing_settings = waterschap_struct["find_crossings_with_peilgebieden"] + init_settings = waterschap_struct["init"] + gpkg = pathlib.Path(init_settings["output_path"]) if not gpkg.exists(): raise ValueError(gpkg) diff --git a/src/peilbeheerst_model/parse_crossings/AmstelGooienVecht.py b/src/peilbeheerst_model/parse_crossings/AmstelGooienVecht.py new file mode 100644 index 0000000..6655907 --- /dev/null +++ b/src/peilbeheerst_model/parse_crossings/AmstelGooienVecht.py @@ -0,0 +1,38 @@ +# %% + +from peilbeheerst_model import ParseCrossings, waterschap_data +from ribasim_nl import CloudStorage + +# %% +waterschap = "AmstelGooienVecht" +waterschap_struct = waterschap_data[waterschap] + +cloud = CloudStorage() +verwerkt_dir = cloud.joinpath(waterschap, "verwerkt") 
+cloud.download_verwerkt(waterschap) +cloud.download_basisgegevens() + +# %% + +crossing_settings = waterschap_struct["find_crossings_with_peilgebieden"] +init_settings = waterschap_struct["init"] + +init_settings["gpkg_path"] = verwerkt_dir / "postprocessed.gpkg" +init_settings["krw_path"] = cloud.joinpath("Basisgegevens/KRW/KRW_lichamen_per_waterschap.gpkg") +init_settings["output_path"] = verwerkt_dir / "crossings.gpkg" +init_settings["logfile"] = verwerkt_dir / "crossings.log" + +# Crossings class initializeren +cross = ParseCrossings(**init_settings) + +# Crossings bepalen en wegschrijven +if crossing_settings["filterlayer"] is None: + df_hydro = cross.find_crossings_with_peilgebieden("hydroobject", **crossing_settings) + cross.write_crossings(df_hydro) +else: + df_hydro, df_dsf, df_hydro_dsf = cross.find_crossings_with_peilgebieden("hydroobject", **crossing_settings) + cross.write_crossings(df_hydro, crossing_settings["filterlayer"], df_dsf, df_hydro_dsf) + +# %% + +cloud.upload_verwerkt(waterschap) From 0e367e1f219312ccf9d4c229f6de2e73a8cf3134 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Mon, 21 Oct 2024 17:20:17 +0200 Subject: [PATCH 20/23] Fix limburg (#175) --- notebooks/limburg/01_fix_model_network.py | 309 ++++++++++++++++++++++ 1 file changed, 309 insertions(+) create mode 100644 notebooks/limburg/01_fix_model_network.py diff --git a/notebooks/limburg/01_fix_model_network.py b/notebooks/limburg/01_fix_model_network.py new file mode 100644 index 0000000..2b00ea6 --- /dev/null +++ b/notebooks/limburg/01_fix_model_network.py @@ -0,0 +1,309 @@ +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin, level_boundary, manning_resistance, outlet + +from ribasim_nl import CloudStorage, Model, NetworkValidator + +cloud = CloudStorage() + +authority = "Limburg" +short_name = "limburg" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", 
f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + +hydroobject_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject", fid_as_index=True +) + +duiker_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="duikersifonhevel", fid_as_index=True +) + + +# %% read model +model = Model.read(ribasim_toml) +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_fix_model_network", f"{short_name}.toml") +network_validator = NetworkValidator(model) + +# %% some stuff we'll need again +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +level_data = level_boundary.Static(level=[0]) + +basin_data = [ + basin.Profile(level=[0.0, 1.0], area=[0.01, 1000.0]), + basin.Static( + drainage=[0.0], + potential_evaporation=[0.001 / 86400], + infiltration=[0.0], + precipitation=[0.005 / 86400], + ), + basin.State(level=[0]), +] +outlet_data = outlet.Static(flow_rate=[100]) + + +# HIER KOMEN ISSUES + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426118811 + +# Verwijderen edge met 0m lengte +model.remove_node(2434, remove_edges=True) +model.remove_node(1308, remove_edges=True) +model.merge_basins(basin_id=2396, to_basin_id=1669, are_connected=False) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426151899 + +# Corrigeren ontbrekende basins en outlets nabij modelrand +# geen kdu bij 2202 +geometry = hydroobject_gdf.at[3099, "geometry"] +basin_node = model.basin.add(Node(geometry=geometry.boundary.geoms[0]), tables=basin_data) +outlet_node = model.outlet.add(Node(geometry=geometry.interpolate(271)), tables=[outlet_data]) +model.redirect_edge(edge_id=2202, from_node_id=outlet_node.node_id) +model.edge.add(basin_node, outlet_node) + +for fid, edge_id, boundary_node_id in ((2054, 2244, 63), (9794, 2295, 103), (9260, 2297, 105), (3307, 2305, 113)): + 
kdu = duiker_gdf.loc[fid] + basin_node = model.basin.add( + Node(geometry=model.edge.df.loc[edge_id, "geometry"].boundary.geoms[0]), tables=basin_data + ) + outlet_node = model.outlet.add( + Node( + name=kdu.code, geometry=kdu.geometry.interpolate(0.5, normalized=True), meta_object_type="duikersifonhevel" + ), + tables=[outlet_data], + ) + model.redirect_edge(edge_id=edge_id, from_node_id=outlet_node.node_id) + model.edge.add(basin_node, outlet_node) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426258242 + +# Corrigeren netwerk bij Jeker +for node_id in [276, 2003, 990, 2395, 989]: + model.remove_node(node_id, remove_edges=True) + +basin_node = model.basin.add(Node(geometry=model.edge.df.at[2257, "geometry"].boundary.geoms[1])) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[2099, "geometry"].interpolate(0.9, normalized=True)), tables=[outlet_data] +) +model.redirect_edge(edge_id=2257, to_node_id=basin_node.node_id) +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[82]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426373368 + +# Corrigeren Snelle Loop bij Defensiekanaal +outlet_node = model.outlet.add( + Node(geometry=model.edge.df.at[2357, "geometry"].boundary.geoms[1]), tables=[outlet_data] +) +model.redirect_edge(edge_id=2357, to_node_id=outlet_node.node_id) +model.edge.add(outlet_node, model.basin[1452]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426401489 + +# Corrigeren Panheelsebeek +model.remove_node(node_id=940, remove_edges=True) +model.reverse_edge(edge_id=211) +model.merge_basins(basin_id=2465, to_basin_id=1340, are_connected=False) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426443778 + +# Toevoegen Wellse Molenbeek +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[3100, "geometry"].boundary.geoms[1]), tables=basin_data) +outlet_node = model.outlet.add( + 
Node(geometry=hydroobject_gdf.at[687, "geometry"].interpolate(0.9, normalized=True)), tables=[outlet_data] +) + +model.redirect_edge(edge_id=2240, to_node_id=basin_node.node_id) +model.redirect_edge(edge_id=2239, from_node_id=basin_node.node_id, to_node_id=outlet_node.node_id) +model.edge.add(basin_node, model.manning_resistance[425]) +model.edge.add(outlet_node, model.level_boundary[59]) + + +# %% +model.remove_node(node_id=1036, remove_edges=True) +kdu = duiker_gdf.loc[4664] +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[477, "geometry"].boundary.geoms[0]), tables=basin_data) +outlet_node = model.outlet.add( + Node(name=kdu.code, geometry=kdu.geometry.interpolate(0.5, normalized=True), meta_object_type="duikersifonhevel"), + tables=[outlet_data], +) +model.edge.add(outlet_node, model.basin[1389]) +model.edge.add(basin_node, outlet_node) + +kdu = duiker_gdf.loc[3709] +outlet_node = model.outlet.add( + Node(name=kdu.code, geometry=kdu.geometry.interpolate(0.5, normalized=True), meta_object_type="duikersifonhevel"), + tables=[outlet_data], +) + +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[39]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426653554 + +# Correctie Panheelderbeek bij kanaal Wessem-Nederweert +model.remove_edges(edge_ids=[2316, 2309, 2307, 2308, 2310, 2312, 2315, 2317]) +model.remove_node(114, remove_edges=True) +model.reverse_edge(edge_id=1999) +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[1649, "geometry"].boundary.geoms[1]), tables=basin_data) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[4110, "geometry"].boundary.geoms[1]), tables=[outlet_data] +) + +model.edge.add(model.tabulated_rating_curve[270], basin_node) +model.edge.add(basin_node, model.manning_resistance[1316]) +model.edge.add(basin_node, model.manning_resistance[1315]) +model.edge.add(basin_node, model.manning_resistance[1130]) +model.edge.add(basin_node, 
outlet_node) +model.edge.add(outlet_node, model.level_boundary[115]) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426706167 + +# Correctie edge-richting bij Ijsselsteinseweg + +model.reverse_edge(edge_id=2332) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426763136 + +# Opname Helenavaart +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[112, "geometry"].boundary.geoms[0]), tables=basin_data) +model.redirect_edge(edge_id=2329, to_node_id=basin_node.node_id) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[112, "geometry"].interpolate(0.9, normalized=True)), tables=[outlet_data] +) +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[123]) + +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[1702, "geometry"].interpolate(0.1, normalized=True)), tables=[outlet_data] +) +model.redirect_edge(edge_id=2328, to_node_id=outlet_node.node_id) +model.edge.add(outlet_node, basin_node) + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426789675 + +# Opname Oude Helenavaart/kanaal van Deurne +basin_node = model.basin.add(Node(geometry=hydroobject_gdf.at[1565, "geometry"].boundary.geoms[0]), tables=basin_data) +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[3277, "geometry"].interpolate(0.9, normalized=True)), tables=[outlet_data] +) + +model.redirect_edge(edge_id=2327, from_node_id=outlet_node.node_id) +model.edge.add(basin_node, outlet_node) + +outlet_node = model.outlet.add( + Node(geometry=hydroobject_gdf.at[1565, "geometry"].interpolate(0.98, normalized=True)), tables=[outlet_data] +) +model.redirect_edge(edge_id=2323, from_node_id=outlet_node.node_id) +model.edge.add(basin_node, outlet_node) + +model.redirect_edge(edge_id=2326, to_node_id=basin_node.node_id) +model.redirect_edge(edge_id=2325, to_node_id=basin_node.node_id) + +model.edge.add(model.tabulated_rating_curve[238], basin_node) + +# 2 
edges die afwateren op Oude Helenavaart +model.remove_edges(edge_ids=[2322, 2324]) + + +# %% https://github.com/Deltares/Ribasim-NL/issues/154#issuecomment-2426816843 + +# Verwijderen afwaterende basisn Mooks kanaal +model.remove_node(34, remove_edges=True) + +# EINDE ISSUES + + +# %% +# corrigeren knoop-topologie + +# ManningResistance bovenstrooms LevelBoundary naar Outlet +for row in network_validator.edge_incorrect_type_connectivity().itertuples(): + model.update_node(row.from_node_id, "Outlet", data=[outlet_data]) + +# Inlaten van ManningResistance naar Outlet +for row in network_validator.edge_incorrect_type_connectivity( + from_node_type="LevelBoundary", to_node_type="ManningResistance" +).itertuples(): + model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) + + +## UPDATEN STATIC TABLES + +# %% +# basin-profielen/state updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), + "level": [0.0, 1.0] * len(model.basin.node.df), + "area": [0.01, 1000.0] * len(model.basin.node.df), + } +) +df.index.name = "fid" +model.basin.profile.df = df + +df = model.basin.profile.df.groupby("node_id")[["level"]].max().reset_index() +df.index.name = "fid" +model.basin.state.df = df + +# %% +# tabulated_rating_curves updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.tabulated_rating_curve.node.df.index.to_numpy(), 2), + "level": [0.0, 5] * len(model.tabulated_rating_curve.node.df), + "flow_rate": [0, 0.1] * len(model.tabulated_rating_curve.node.df), + } +) +df.index.name = "fid" +model.tabulated_rating_curve.static.df = df + + +# %% + +# level_boundaries updaten +df = pd.DataFrame( + { + "node_id": model.level_boundary.node.df.index.to_list(), + "level": [0.0] * len(model.level_boundary.node.df), + } +) +df.index.name = "fid" +model.level_boundary.static.df = df + +# %% +# manning_resistance updaten +length = len(model.manning_resistance.node.df) +df = pd.DataFrame( + { + "node_id": 
model.manning_resistance.node.df.index.to_list(), + "length": [100.0] * length, + "manning_n": [100.0] * length, + "profile_width": [100.0] * length, + "profile_slope": [100.0] * length, + } +) +df.index.name = "fid" +model.manning_resistance.static.df = df + +# %% +# flow boundaries updaten +length = len(model.flow_boundary.node.df) +df = pd.DataFrame( + { + "node_id": model.flow_boundary.node.df.index.to_list(), + "flow_rate": [0.0] * length, + } +) +df.index.name = "fid" +model.flow_boundary.static.df = df + + +# %% write model +model.write(ribasim_toml) + +# %% From 68c5dbcef4271c70dfc6e8900cb56ac2c18fad6b Mon Sep 17 00:00:00 2001 From: Martijn Visser Date: Tue, 22 Oct 2024 09:52:25 +0200 Subject: [PATCH 21/23] Deduplicate shortest path code (#176) The code was identical except for comments and print statements. So now we move it all into a single function that is part of the package, so the individual scripts are just a few lines. Only AGV was an outlier since it had a lot of code after the shared code. Perhaps @rbruijnshkv can comment on if that is needed or not. Another difference I noticed is this, AGV has this set to 5: ``` distance_interval = 50 # The distance interval you want to segment the lines at ``` This only deduplicates, I didn't want to start modifying the code at the same time. So I cannot run it since it relies on local paths that I don't have. 
--- pixi.lock | 3 +- .../01_shortest_path_Hollandse_Delta.py | 455 +----------- .../Shortest_path/02_shortest_path_HHSK.py | 454 +----------- .../Shortest_path/03_shortest_path_HHNK.py | 454 +----------- .../04_shortest_path_Delfland.py | 453 +----------- .../05_shortest_path_Scheldestromen.py | 453 +----------- .../06_shortest_path_Zuiderzeeland.py | 458 +----------- .../Shortest_path/07_shortest_path_WSRL.py | 453 +----------- .../08_shortest_path_Wetterskip.py | 454 +----------- .../09_shortest_path_Rijnland.py | 457 +----------- .../Shortest_path/10_shortest_path_AGV.py | 700 +----------------- .../peilbeheerst_model/__init__.py | 3 +- .../peilbeheerst_model/shortest_path.py | 443 +++++++++++ src/peilbeheerst_model/pyproject.toml | 12 +- 14 files changed, 479 insertions(+), 4773 deletions(-) create mode 100644 src/peilbeheerst_model/peilbeheerst_model/shortest_path.py diff --git a/pixi.lock b/pixi.lock index 1188e17..e8534b9 100644 --- a/pixi.lock +++ b/pixi.lock @@ -14706,9 +14706,10 @@ packages: name: peilbeheerst-model version: 0.1.0 path: src/peilbeheerst_model - sha256: 21b935f0542b5dbb76baaf1944b11ef6fab94b5a5926565410fe3d62722c332d + sha256: e8c270f68d683c802990a8bb905cd665fd327b33e1d17bf3e01dfe9c49ec335e requires_dist: - geopandas + - matplotlib - numpy - pandas - pydantic diff --git a/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py b/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py index f982ef6..5200977 100644 --- a/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py +++ b/src/peilbeheerst_model/Shortest_path/01_shortest_path_Hollandse_Delta.py @@ -1,458 +1,7 @@ -# # Holandse Delta -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import 
numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "Hollandse Delta" - -# Define crossings file path -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - # print(row) - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - # print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - print(len(gdf_object)) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) - - -gdf_out.to_file(f"/DATAFOLDER/projects/4750_30/Data_shortest_path/Hollandse_Delta/{waterschap}_shortest_path2.gpkg") diff --git a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py 
b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py index 381c6e3..997cff4 100644 --- a/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py +++ b/src/peilbeheerst_model/Shortest_path/02_shortest_path_HHSK.py @@ -1,457 +1,7 @@ -# # HHSK -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "HHSK" - - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - print(index) - - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) diff --git a/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py b/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py index b75ac29..eae179b 100644 --- 
a/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py +++ b/src/peilbeheerst_model/Shortest_path/03_shortest_path_HHNK.py @@ -1,457 +1,7 @@ -# # HHNK -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "HHNK" - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - # print(row) - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - # print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - print(len(gdf_object)) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - print(gdf_cross_single) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest 
paths") - ax.legend() - plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) diff --git a/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py b/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py index 8521b8b..a218116 100644 --- 
a/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py +++ b/src/peilbeheerst_model/Shortest_path/04_shortest_path_Delfland.py @@ -1,456 +1,7 @@ -# # Delfland -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "Delfland" - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - print(index) - - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) diff --git a/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py b/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py index 288be37..a82a7d3 100644 --- 
a/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py +++ b/src/peilbeheerst_model/Shortest_path/05_shortest_path_Scheldestromen.py @@ -1,456 +1,7 @@ -# # Scheldestromen -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "Scheldestromen" - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -gdf_rhws.plot() - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - print(index) - - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) diff --git a/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py b/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py index a128e10..054fe44 100644 --- 
a/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py +++ b/src/peilbeheerst_model/Shortest_path/06_shortest_path_Zuiderzeeland.py @@ -1,461 +1,7 @@ -# # Zuiderzeeland -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "Zuiderzeeland" - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" -print(data_path) -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -DATA.keys() - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - # print(row) - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - # print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - print(len(gdf_object)) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) - - -# message = pd.DataFrame() -# message['text'] = np.nan#'Zuiderzeeland is klaar' -# message.to_csv('Zuiderzeeland_is_klaar.txt') diff --git a/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py 
b/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py index 2a9d15e..172f9c6 100644 --- a/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py +++ b/src/peilbeheerst_model/Shortest_path/07_shortest_path_WSRL.py @@ -1,456 +1,7 @@ -# # WSRL -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "WSRL" - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - print(index) - - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) diff --git a/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py b/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py index 6a1d9bf..75fdd93 100644 --- 
a/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py +++ b/src/peilbeheerst_model/Shortest_path/08_shortest_path_Wetterskip.py @@ -1,457 +1,7 @@ -# # Wetterskip -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "Wetterskip" - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - print(index) - - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) diff --git a/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py b/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py index 8ac7aa6..0b2a5a1 100644 --- 
a/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py +++ b/src/peilbeheerst_model/Shortest_path/09_shortest_path_Rijnland.py @@ -1,460 +1,7 @@ -# # Rijnland -# -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# - - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -from peilbeheerst_model import waterschap_data - -# ### Load Data - +from peilbeheerst_model import shortest_path_waterschap waterschap = "Rijnland" - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = waterschap_data[waterschap]["init"]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - print(index) - - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 50 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - except Exception as e: - print(e) - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) - - -1 diff --git a/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py b/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py index cf4e8a3..1d16166 100644 --- 
a/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py +++ b/src/peilbeheerst_model/Shortest_path/10_shortest_path_AGV.py @@ -1,701 +1,7 @@ -# # AGV -# -# -# ### Create shortest_path RHWS network -# -# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 -# +from peilbeheerst_model import shortest_path_waterschap - -import fiona -import geopandas as gpd -import matplotlib.pyplot as plt -import networkx as nx -import numpy as np -import pandas as pd -import shapely -import tqdm.auto as tqdm -from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import split -from shapely.wkt import dumps - -# ### Load Data - - -waterschap1 = "AmstelGooienVecht" -waterschap2 = "AGV" -waterschap = waterschap1 - -# Define crossings file path -path2json = "/DATAFOLDER/projects/4750_30/Scripts/Ribasim-NL/src/peilbeheerst_model/waterschappen.json" -data_path_str = pd.read_json(path2json).loc["init"][waterschap1]["output_path"] -data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" - - -# Load crossings file -DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} - - -# ### Select rhws - - -# Select RHWS peilgebeied & calculate representative point -gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() -gdf_rhws["representative_point"] = gdf_rhws.representative_point() - -# Apply aggregation level based filter -gdf_cross = ( - DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() -) # filter aggregation level - - -# ### Define functions -# 1. splitting functions -# 2. connect graphs functions -# 3. 
explode nodes functions - - -def split_line_at_point(line, point): - buff = point.buffer(1e-4) # Small buffer around the point - split_result = split(line, buff) - if len(split_result.geoms) in [2, 3]: - # Assume first and last segments are the result, ignore tiny middle segment if exists - result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) - else: - # Return the original line as a MultiLineString for consistency if no split occurred - result = MultiLineString([line]) - return result - - -def split_lines_at_intersections(gdf_object): - split_lines = [] - gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes - - for idx, row in gdf_object.iterrows(): - was_split = False - - # Get potential intersections using spatial index - possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) - possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self - precise_matches = possible_matches[possible_matches.intersects(row.geometry)] - - for match_idx, match in precise_matches.iterrows(): - if row.geometry.intersects(match.geometry): - intersection = row.geometry.intersection(match.geometry) - if isinstance(intersection, Point): - # Split the current line at the intersection point - try: - split_result = split_line_at_point(row.geometry, intersection) - for geom in split_result.geoms: - new_row = row.copy() - new_row.geometry = geom - split_lines.append(new_row) - was_split = True - except ValueError as e: - print(f"Error splitting line: {e}") - # Add other intersection types handling if needed - break # Assumes only one split per line; remove or modify for multiple splits - - if not was_split: - # If the line was not split, include the original line - split_lines.append(row) - - # Create a new GeoDataFrame from the split or original lines - result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) - return result_gdf - - -def component_to_gdf(component, node_geometries): - 
geometries = [node_geometries[node] for node in component] - return gpd.GeoDataFrame(geometry=geometries, index=list(component)) - - -def connect_components(graph, node1, node2, node_geometries): - geom1 = node_geometries[node1] - geom2 = node_geometries[node2] - new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) - graph.add_edge(node1, node2, geometry=new_edge_geom) - - -def find_closest_component_pair(largest_gdf, smaller_gdfs): - print(len(smaller_gdfs), end="\r") - sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) - nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) - li, si = nearest_i[:, np.argmin(dist2)] - - nearest_idx, dist = smaller_gdfs[si].sindex.nearest( - largest_gdf.geometry.iat[li], return_all=False, return_distance=True - ) - node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] - node_in_largest = largest_gdf.index[li] - closest_pair_nodes = (node_in_largest, node_in_smaller) - # print("done") - return si, closest_pair_nodes - - -def cut_linestring_at_interval(line, interval): - """Cut a LineString into segments of a specified interval.""" - # Calculate the number of segments needed - num_segments = int(np.ceil(line.length / interval)) - if num_segments == 1: - return [line] - - points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] - return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] - - -def explode_linestrings(gdf, interval): - """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" - segments = [] - for _, row in gdf.iterrows(): - line = row.geometry - segments.extend(cut_linestring_at_interval(line, interval)) - - return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) - - -def connect_linestrings_within_distance(gdf, max_distance=4): - gdf = gdf.explode(ignore_index=False, index_parts=True) - 
gdf["geometry"] = gdf.make_valid() - gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) - gdf = gdf[~gdf.is_empty].copy() - - change_idx, change_geom = [], [] - for row in tqdm.tqdm( - gdf.itertuples(), - total=len(gdf), - ): - ps = row.geometry.boundary.geoms - if len(ps) != 2: - continue - p0, p1 = ps - - p0_changed, p1_changed = False, False - idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") - if len(idx0) > 0: - dist0 = gdf.iloc[idx0].distance(p0) - if (dist0 > 10e-8).any(): - snap_lbl0 = dist0[dist0 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl0] - p0 = geom.interpolate(geom.project(p0)) - p0_changed = True - - idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") - if len(idx1) > 0: - dist1 = gdf.iloc[idx1].distance(p1) - if (dist1 > 10e-8).any(): - snap_lbl1 = dist1[dist1 > 10e-8].idxmin() - geom = gdf.geometry.at[snap_lbl1] - p1 = geom.interpolate(geom.project(p1)) - p1_changed = True - - if p0_changed or p1_changed: - coords = list(row.geometry.coords) - if p0_changed: - coords = list(p0.coords) + coords - if p1_changed: - coords = coords + list(p1.coords) - change_idx.append(row.Index) - change_geom.append(LineString(coords)) - - if len(change_idx) > 0: - gdf.loc[change_idx, "geometry"] = change_geom - - return gdf - - -# # Shortest Path - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - -# Loop RHWS polygons -gdf_crossings_out = [] - -for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): - try: - # print(row) - ### Select Crossings/Hydroobjects ### - print("Select Crossings/Hydroobjects") - - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) - ].copy() - # print("Clip Crossings/Hydroobjects") - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - print("Split Hydroobjects at Intersect") - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - print(len(gdf_object)) - - print("Connect Hydroobjects within distance") - # Explode the linestrings into smaller segments - distance_interval = 5 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. - # Keep only points which are (almost) equal to the crossings. 
- startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - print("Find distruptions in Graph") - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the loop - break - - 
print(len(smaller_components), end="\r") - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # 
Calculate the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap1} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap1}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path["geometry"] = shortest_path.apply( - lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 - ) - - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap1}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - - except Exception as e: - print(e) - - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) -gdf_out.to_file( - f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap2}/{waterschap2}_shortest_path.gpkg", driver="GPKG" -) - - -gdf_crossings_out = [] -gdf_rhws = gdf_rhws.reset_index(drop=True) - - -# Loop RHWS polygons -gdf_crossings_out = [] - -for index, rhws in gdf_rhws.iterrows(): - try: - print(index) - ### Select Crossings/Hydroobjects ### - print("Select 
Crossings/Hydroobjects") - # Single RHWS row as GeoDataFrame - gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) - - # Select for each boezem polygon the relevant crossings - globalid_value = gdf_rhws_single.globalid.iloc[0] - gdf_cross_single = gdf_cross[ - (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == globalid_value) - ].copy() - - # Select hydroobjects in RHWS polygons - gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) - gdf_object = gdf_object.reset_index(drop=True) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - # Split lines at intersection - gdf_object = split_lines_at_intersections(gdf_object) - - # Explode the linestrings into smaller segments - distance_interval = 200 # The distance interval you want to segment the lines at - gdf_object = explode_linestrings(gdf_object, distance_interval) - - # Make sure that hydroobjects are connected - gdf_object = connect_linestrings_within_distance(gdf_object) - - # Explode linestrings - gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) - gdf_object = gdf_object[~gdf_object.is_empty].copy() - gdf_object = gdf_object[gdf_object.length > 1e-7].copy() - - ### Create NetworkX nodes ### - print("Create NetworkX") - # Use start and end points from hydroobjects in networkx as nodes - nodes_gdf = gdf_object.copy() - nodes_gdf["geometry"] = nodes_gdf.geometry.boundary - nodes_gdf = nodes_gdf.explode(index_parts=True) - - # Use the unique points as nodes in networkx - nodes_gdf.insert(0, "node_id", -1) - node_id = 1 - for geom, group in nodes_gdf.groupby("geometry"): - nodes_gdf.loc[group.index, "node_id"] = node_id - node_id += 1 - - ### Select startpoints & endpoints RHWS network ### - # Find the closest starting points from the crossings. 
- # Keep only points which are (almost) equal to the crossings. - startpoints, distances = nodes_gdf.sindex.nearest( - gdf_cross_single.geometry, return_all=False, return_distance=True - ) - startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values - - gdf_cross_single["node_id"] = startpoints - gdf_cross_single["node_id_distance"] = distances - - # find the node_id closest to the RHWS representative point (end point) - # Exclude the points which are already used as starting points - df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() - endpoint, distance = df_endpoint.sindex.nearest( - rhws.representative_point, return_all=False, return_distance=True - ) - - endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] - gdf_rhws_single["node_id"] = endpoint - gdf_rhws_single["node_id_distance"] = distance - - ### Create networkx graph ### - graph = nx.Graph() - - # add nodes in boezem - for node_id, group in nodes_gdf.groupby("node_id"): - graph.add_node(node_id, geometry=group.geometry.iat[0]) - - # add edges - line_lookup = gdf_object.geometry - for idx0, group in nodes_gdf.groupby(level=0): - node_from, node_to = group.node_id - line_geom = gdf_object.geometry.at[idx0] - graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) - - ### Find distruptions Graph ### - # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes - # Get lists of compnents (sub-graph) - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - print(len(smaller_components), end="\r") - while True: - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] - - if not smaller_components: # If there are no smaller components left, break the 
loop - break - - # Update node geometries and largest_gdf for each iteration - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - largest_gdf = component_to_gdf(largest_component, node_geometries) - smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] - - # Find the closest smaller_gdf to the largest_gdf - closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair(largest_gdf, smaller_gdfs) - - # Connect the closest nodes - connect_components(graph, node_in_largest, node_in_smaller, node_geometries) - - # calculate shortest_path networkx - gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() - not_connected = [] - - components = list(nx.connected_components(graph)) - largest_component = max(components, key=len) - smaller_components = [comp for comp in components if comp != largest_component] # not used anymore - node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = shapely.ops.linemerge( - edges - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - if not_connected: - print("not connected") - # Force connection - # Convert the largest connected component to a GeoDataFrame for spatial operations - largest_component_gdf = gpd.GeoDataFrame( - geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs - ) - largest_component_gdf["node_id"] = list(largest_component) - - # Iterate over each not_connected node - for nc_node in not_connected: - nc_node_geom = node_geometries[nc_node] - - # Calculate 
the distance to all nodes in the largest component - distances = largest_component_gdf.geometry.distance(nc_node_geom) - - # Find the closest node in the largest component - closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id - - # Add edge between not_connected node and closest node in the largest component - # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements - graph.add_edge( - nc_node, - closest_node_id, - geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), - ) - - for startpoint in startpoints: - try: - shortest_path = nx.shortest_path( - graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" - ) - edges = [] - for i in range(0, len(shortest_path) - 1): - edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) - gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( - shapely.ops.linemerge(edges) - ) - - except nx.NetworkXNoPath as e: - print(e) - not_connected.append(startpoint) - - ### Append output ### - gdf_crossings_out.append(gdf_cross_single) - - ### Plot graph ### - print("Plotting Output") - fig, ax = plt.subplots(figsize=(8, 8)) - plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) - plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) - plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) - ax.set_title(f"{waterschap} shortest paths {index}") - plt_rhws.plot(ax=ax, color="green") - gdf_rhws_single.plot(ax=ax, color="lightblue") - plt_rep.plot(ax=ax, color="blue", label="representative_point") - gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") - gdf_cross_single.plot(ax=ax, color="orange", label="crossings") - plt_paths.plot(ax=ax, color="purple", label="shortest paths") - ax.legend() - 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) - - # Save results - print("Writing Output") - objects = {} - objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) - shortest_path = gdf_cross_single.drop(columns=["geometry"]) - shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) - shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) - objects["shortest_path"] = shortest_path - objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( - columns=["representative_point"] - ) - objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) - objects["representative_point"] = gpd.GeoDataFrame( - gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs - ).drop(columns=["geometry"]) - objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) - - for key, value in objects.items(): - # For each GeoDataFrame, save it to a layer in the GeoPackage - value.to_file( - f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" - ) - except Exception as e: - print(e) - - -# Write final output -gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) -gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) +waterschap = "AmstelGooienVecht" +gdf_out = shortest_path_waterschap(waterschap) gdf_out.to_file( f"/DATAFOLDER/projects/4750_30/Data_shortest_path/{waterschap}/{waterschap}_shortest_path.gpkg", driver="GPKG" ) diff --git a/src/peilbeheerst_model/peilbeheerst_model/__init__.py b/src/peilbeheerst_model/peilbeheerst_model/__init__.py index 58608ef..3fd1b2e 100644 --- a/src/peilbeheerst_model/peilbeheerst_model/__init__.py +++ b/src/peilbeheerst_model/peilbeheerst_model/__init__.py @@ -1,6 +1,7 @@ __version__ = "0.1.0" from 
peilbeheerst_model.parse_crossings import ParseCrossings +from peilbeheerst_model.shortest_path import shortest_path_waterschap from peilbeheerst_model.waterschappen import waterschap_data -__all__ = ["ParseCrossings", "waterschap_data"] +__all__ = ["ParseCrossings", "shortest_path_waterschap", "waterschap_data"] diff --git a/src/peilbeheerst_model/peilbeheerst_model/shortest_path.py b/src/peilbeheerst_model/peilbeheerst_model/shortest_path.py new file mode 100644 index 0000000..75a395d --- /dev/null +++ b/src/peilbeheerst_model/peilbeheerst_model/shortest_path.py @@ -0,0 +1,443 @@ +# Create shortest_path RHWS network +# +# Code is based on: https://github.com/Deltares/Ribasim-NL/blob/1ad35931f49280fe223cbd9409e321953932a3a4/notebooks/ijsselmeermodel/netwerk.py#L55 + +import fiona +import geopandas as gpd +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +import pandas as pd +import shapely +import tqdm.auto as tqdm +from shapely.geometry import LineString, MultiLineString, Point +from shapely.ops import split +from shapely.wkt import dumps + +from peilbeheerst_model import waterschap_data + +# ### Define functions +# 1. splitting functions +# 2. connect graphs functions +# 3. 
explode nodes functions + + +def split_line_at_point(line, point): + buff = point.buffer(1e-4) # Small buffer around the point + split_result = split(line, buff) + if len(split_result.geoms) in [2, 3]: + # Assume first and last segments are the result, ignore tiny middle segment if exists + result = MultiLineString([split_result.geoms[0], split_result.geoms[-1]]) + else: + # Return the original line as a MultiLineString for consistency if no split occurred + result = MultiLineString([line]) + return result + + +def split_lines_at_intersections(gdf_object): + split_lines = [] + gdf_object.drop(columns=["geometry"]) # Preserve non-geometry attributes + + for idx, row in gdf_object.iterrows(): + was_split = False + + # Get potential intersections using spatial index + possible_matches_index = list(gdf_object.sindex.intersection(row.geometry.bounds)) + possible_matches = gdf_object.iloc[possible_matches_index].drop(idx) # Exclude self + precise_matches = possible_matches[possible_matches.intersects(row.geometry)] + + for match_idx, match in precise_matches.iterrows(): + if row.geometry.intersects(match.geometry): + intersection = row.geometry.intersection(match.geometry) + if isinstance(intersection, Point): + # Split the current line at the intersection point + try: + split_result = split_line_at_point(row.geometry, intersection) + for geom in split_result.geoms: + new_row = row.copy() + new_row.geometry = geom + split_lines.append(new_row) + was_split = True + except ValueError as e: + print(f"Error splitting line: {e}") + # Add other intersection types handling if needed + break # Assumes only one split per line; remove or modify for multiple splits + + if not was_split: + # If the line was not split, include the original line + split_lines.append(row) + + # Create a new GeoDataFrame from the split or original lines + result_gdf = gpd.GeoDataFrame(split_lines, columns=gdf_object.columns) + return result_gdf + + +def component_to_gdf(component, node_geometries): + 
geometries = [node_geometries[node] for node in component] + return gpd.GeoDataFrame(geometry=geometries, index=list(component)) + + +def connect_components(graph, node1, node2, node_geometries): + geom1 = node_geometries[node1] + geom2 = node_geometries[node2] + new_edge_geom = LineString([geom1.coords[0], geom2.coords[0]]) + graph.add_edge(node1, node2, geometry=new_edge_geom) + + +def find_closest_component_pair(largest_gdf, smaller_gdfs): + print(len(smaller_gdfs), end="\r") + sgdf = gpd.GeoSeries([shapely.geometry.MultiPoint(small_gdf.geometry.tolist()) for small_gdf in smaller_gdfs]) + nearest_i, dist2 = sgdf.sindex.nearest(largest_gdf.geometry, return_all=False, return_distance=True) + li, si = nearest_i[:, np.argmin(dist2)] + + nearest_idx, dist = smaller_gdfs[si].sindex.nearest( + largest_gdf.geometry.iat[li], return_all=False, return_distance=True + ) + node_in_smaller = smaller_gdfs[si].index[nearest_idx[1, 0]] + node_in_largest = largest_gdf.index[li] + closest_pair_nodes = (node_in_largest, node_in_smaller) + # print("done") + return si, closest_pair_nodes + + +def cut_linestring_at_interval(line, interval): + """Cut a LineString into segments of a specified interval.""" + # Calculate the number of segments needed + num_segments = int(np.ceil(line.length / interval)) + if num_segments == 1: + return [line] + + points = [line.interpolate(distance) for distance in np.linspace(0, line.length, num_segments + 1)] + return [LineString([points[i], points[i + 1]]) for i in range(num_segments)] + + +def explode_linestrings(gdf, interval): + """Explode LineStrings in a GeoDataFrame into smaller segments based on a distance interval.""" + segments = [] + for _, row in gdf.iterrows(): + line = row.geometry + segments.extend(cut_linestring_at_interval(line, interval)) + + return gpd.GeoDataFrame(geometry=segments, crs=gdf.crs) + + +def connect_linestrings_within_distance(gdf, max_distance=4): + gdf = gdf.explode(ignore_index=False, index_parts=True) + 
gdf["geometry"] = gdf.make_valid() + gdf["geometry"] = gdf.geometry.apply(shapely.force_2d) + gdf = gdf[~gdf.is_empty].copy() + + change_idx, change_geom = [], [] + for row in tqdm.tqdm( + gdf.itertuples(), + total=len(gdf), + ): + ps = row.geometry.boundary.geoms + if len(ps) != 2: + continue + p0, p1 = ps + + p0_changed, p1_changed = False, False + idx0 = gdf.sindex.query(p0.buffer(max_distance), predicate="intersects") + if len(idx0) > 0: + dist0 = gdf.iloc[idx0].distance(p0) + if (dist0 > 10e-8).any(): + snap_lbl0 = dist0[dist0 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl0] + p0 = geom.interpolate(geom.project(p0)) + p0_changed = True + + idx1 = gdf.sindex.query(p1.buffer(max_distance), predicate="intersects") + if len(idx1) > 0: + dist1 = gdf.iloc[idx1].distance(p1) + if (dist1 > 10e-8).any(): + snap_lbl1 = dist1[dist1 > 10e-8].idxmin() + geom = gdf.geometry.at[snap_lbl1] + p1 = geom.interpolate(geom.project(p1)) + p1_changed = True + + if p0_changed or p1_changed: + coords = list(row.geometry.coords) + if p0_changed: + coords = list(p0.coords) + coords + if p1_changed: + coords = coords + list(p1.coords) + change_idx.append(row.Index) + change_geom.append(LineString(coords)) + + if len(change_idx) > 0: + gdf.loc[change_idx, "geometry"] = change_geom + + return gdf + + +def shortest_path(waterschap, DATA, gdf_cross, gdf_rhws): + gdf_rhws = gdf_rhws.reset_index(drop=True) + + # Loop RHWS polygons + gdf_crossings_out = [] + + for index, rhws in tqdm.tqdm(gdf_rhws.iterrows(), total=len(gdf_rhws), colour="blue"): + try: + ### Select Crossings/Hydroobjects ### + print("Select Crossings/Hydroobjects") + + # Single RHWS row as GeoDataFrame + gdf_rhws_single = gpd.GeoDataFrame(rhws.to_frame().T, geometry="geometry", crs=gdf_rhws.crs) + + # Select for each boezem polygon the relevant crossings + globalid_value = gdf_rhws_single.globalid.iloc[0] + gdf_cross_single = gdf_cross[ + (gdf_cross.peilgebied_from == globalid_value) | (gdf_cross.peilgebied_to == 
globalid_value) + ].copy() + print("Clip Crossings/Hydroobjects") + # Select hydroobjects in RHWS polygons + gdf_object = gpd.clip(DATA["hydroobject"], gdf_rhws_single) + gdf_object = gdf_object.reset_index(drop=True) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + print("Split Hydroobjects at Intersect") + # Split lines at intersection + gdf_object = split_lines_at_intersections(gdf_object) + + print("Connect Hydroobjects within distance") + # Explode the linestrings into smaller segments + distance_interval = 50 # The distance interval you want to segment the lines at + gdf_object = explode_linestrings(gdf_object, distance_interval) + + # Make sure that hydroobjects are connected + gdf_object = connect_linestrings_within_distance(gdf_object) + + # Explode linestrings + gdf_object = gdf_object.explode(index_parts=False).reset_index(drop=True) + gdf_object = gdf_object[~gdf_object.is_empty].copy() + gdf_object = gdf_object[gdf_object.length > 1e-7].copy() + + ### Create NetworkX nodes ### + print("Create NetworkX") + # Use start and end points from hydroobjects in networkx as nodes + nodes_gdf = gdf_object.copy() + nodes_gdf["geometry"] = nodes_gdf.geometry.boundary + nodes_gdf = nodes_gdf.explode(index_parts=True) + + # Use the unique points as nodes in networkx + nodes_gdf.insert(0, "node_id", -1) + node_id = 1 + for geom, group in nodes_gdf.groupby("geometry"): + nodes_gdf.loc[group.index, "node_id"] = node_id + node_id += 1 + + ### Select startpoints & endpoints RHWS network ### + # Find the closest starting points from the crossings. + # Keep only points which are (almost) equal to the crossings. 
+ startpoints, distances = nodes_gdf.sindex.nearest( + gdf_cross_single.geometry, return_all=False, return_distance=True + ) + startpoints = nodes_gdf.node_id.iloc[startpoints[1, :]].values + + gdf_cross_single["node_id"] = startpoints + gdf_cross_single["node_id_distance"] = distances + + # find the node_id closest to the RHWS representative point (end point) + # Exclude the points which are already used as starting points + df_endpoint = nodes_gdf[~nodes_gdf.node_id.isin(gdf_cross_single.node_id)].copy() + endpoint, distance = df_endpoint.sindex.nearest( + rhws.representative_point, return_all=False, return_distance=True + ) + + endpoint = df_endpoint.node_id.iat[endpoint[1, 0]] + gdf_rhws_single["node_id"] = endpoint + gdf_rhws_single["node_id_distance"] = distance + + ### Create networkx graph ### + graph = nx.Graph() + + # add nodes in boezem + for node_id, group in nodes_gdf.groupby("node_id"): + graph.add_node(node_id, geometry=group.geometry.iat[0]) + + # add edges + for idx0, group in nodes_gdf.groupby(level=0): + node_from, node_to = group.node_id + line_geom = gdf_object.geometry.at[idx0] + graph.add_edge(node_from, node_to, length=line_geom.length, geometry=line_geom) + + ### Find distruptions Graph ### + # The graph often consists of multiple smaller graphs due to edges not properly connecting with nodes + # Get lists of compnents (sub-graph) + print("Find distruptions in Graph") + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] # not used anymore + print(len(smaller_components), end="\r") + + while True: + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + + if not smaller_components: # If there are no smaller components left, break the loop + break + + print(len(smaller_components), end="\r") + 
# Update node geometries and largest_gdf for each iteration + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + largest_gdf = component_to_gdf(largest_component, node_geometries) + smaller_gdfs = [component_to_gdf(comp, node_geometries) for comp in smaller_components] + + # Find the closest smaller_gdf to the largest_gdf + closest_index, (node_in_largest, node_in_smaller) = find_closest_component_pair( + largest_gdf, smaller_gdfs + ) + + # Connect the closest nodes + connect_components(graph, node_in_largest, node_in_smaller, node_geometries) + + # calculate shortest_path networkx + gdf_cross_single["shortest_path"] = shapely.geometry.GeometryCollection() + not_connected = [] + + components = list(nx.connected_components(graph)) + largest_component = max(components, key=len) + smaller_components = [comp for comp in components if comp != largest_component] + node_geometries = {node: graph.nodes[node]["geometry"] for node in graph.nodes()} + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + if not_connected: + print("not connected") + # Force connection + # Convert the largest connected component to a GeoDataFrame for spatial operations + largest_component_gdf = gpd.GeoDataFrame( + geometry=[node_geometries[node] for node in largest_component], crs=gdf_rhws.crs + ) + largest_component_gdf["node_id"] = list(largest_component) + + # Iterate over each not_connected node + for nc_node in not_connected: + nc_node_geom = node_geometries[nc_node] + + # Calculate the distance to all nodes 
in the largest component + distances = largest_component_gdf.geometry.distance(nc_node_geom) + + # Find the closest node in the largest component + closest_node_id = largest_component_gdf.iloc[distances.idxmin()].node_id + + # Add edge between not_connected node and closest node in the largest component + # Note: You might want to calculate the LineString geometry connecting these nodes based on your specific requirements + graph.add_edge( + nc_node, + closest_node_id, + geometry=LineString([node_geometries[nc_node], node_geometries[closest_node_id]]), + ) + + for startpoint in startpoints: + try: + shortest_path = nx.shortest_path( + graph, source=startpoint, target=endpoint, weight="length", method="dijkstra" + ) + edges = [] + for i in range(0, len(shortest_path) - 1): + edges.append(graph.get_edge_data(shortest_path[i], shortest_path[i + 1])["geometry"]) + gdf_cross_single.loc[gdf_cross_single.node_id == startpoint, "shortest_path"] = ( + shapely.ops.linemerge(edges) + ) + + except nx.NetworkXNoPath as e: + print(e) + not_connected.append(startpoint) + + ### Append output ### + gdf_crossings_out.append(gdf_cross_single) + + ### Plot graph ### + print("Plotting Output") + fig, ax = plt.subplots(figsize=(8, 8)) + plt_paths = gpd.GeoDataFrame(gdf_cross_single, geometry="shortest_path", crs=gdf_cross_single.crs) + plt_rep = gpd.GeoDataFrame(gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs) + plt_rhws = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs) + ax.set_title(f"{waterschap} shortest paths {index}") + plt_rhws.plot(ax=ax, color="green") + gdf_rhws_single.plot(ax=ax, color="lightblue") + plt_rep.plot(ax=ax, color="blue", label="representative_point") + gdf_object.plot(ax=ax, color="gray", linewidth=0.5, label="hydroobjects") + gdf_cross_single.plot(ax=ax, color="orange", label="crossings") + plt_paths.plot(ax=ax, color="purple", label="shortest paths") + ax.legend() + 
plt.savefig(f"./shortest_path/Figures/shortest_path_{waterschap}_RHWS_{index}_new", dpi=300) + + # Save results + print("Writing Output") + objects = {} + objects["hydroobjects"] = gpd.GeoDataFrame(gdf_object, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path = gdf_cross_single.drop(columns=["geometry"]) + shortest_path = shortest_path.rename(columns={"shortest_path": "geometry"}) + shortest_path = gpd.GeoDataFrame(shortest_path, geometry="geometry", crs=gdf_cross_single.crs) + shortest_path["geometry"] = shortest_path.apply( + lambda r: shapely.simplify(r.geometry, tolerance=1, preserve_topology=True), axis=1 + ) + + objects["shortest_path"] = shortest_path + objects["rhws"] = gpd.GeoDataFrame(gdf_rhws_single, geometry="geometry", crs=gdf_rhws_single.crs).drop( + columns=["representative_point"] + ) + objects["crossings"] = gdf_cross_single.drop(columns=["shortest_path"]) + objects["representative_point"] = gpd.GeoDataFrame( + gdf_rhws_single, geometry="representative_point", crs=gdf_rhws_single.crs + ).drop(columns=["geometry"]) + objects["nodes"] = gpd.GeoDataFrame(nodes_gdf, geometry="geometry", crs=gdf_cross_single.crs) + + for key, value in objects.items(): + # For each GeoDataFrame, save it to a layer in the GeoPackage + value.to_file( + f"./shortest_path/Geopackages/{waterschap}_unconnected_{index}.gpkg", layer=key, driver="GPKG" + ) + + except Exception as e: + print(e) + + return gdf_crossings_out + + +def shortest_path_waterschap(waterschap): + # Load Data + # Define crossings file path + data_path_str = waterschap_data[waterschap]["init"]["output_path"] + data_path = f"/DATAFOLDER/projects/4750_30/{data_path_str.split('../')[-1]}" + + # Load crossings file + DATA = {L: gpd.read_file(data_path, layer=L) for L in fiona.listlayers(data_path)} + + # ### Select rhws + + # Select RHWS peilgebeied & calculate representative point + gdf_rhws = DATA["peilgebied"].loc[DATA["peilgebied"]["peilgebied_cat"] == 1].copy() + 
gdf_rhws["representative_point"] = gdf_rhws.representative_point() + + # Apply aggregation level based filter + gdf_cross = ( + DATA["crossings_hydroobject_filtered"].loc[DATA["crossings_hydroobject_filtered"]["agg_links_in_use"]].copy() + ) # filter aggregation level + + gdf_crossings_out = shortest_path(waterschap, DATA, gdf_cross, gdf_rhws) + # Write final output + gdf_out = gpd.GeoDataFrame(pd.concat(gdf_crossings_out)) + gdf_out["shortest_path"] = gdf_out["shortest_path"].apply(lambda geom: dumps(geom) if geom is not None else None) + return gdf_out diff --git a/src/peilbeheerst_model/pyproject.toml b/src/peilbeheerst_model/pyproject.toml index 856c664..82b6a21 100644 --- a/src/peilbeheerst_model/pyproject.toml +++ b/src/peilbeheerst_model/pyproject.toml @@ -14,7 +14,17 @@ authors = [ license = { text = "MIT" } requires-python = ">=3.10" -dependencies = ["geopandas", "numpy", "pandas", "pydantic", "shapely", "tqdm", "matplotlib"] +dependencies = [ + "geopandas", + "numpy", + "pandas", + "pydantic", + "shapely", + "tqdm", + "matplotlib", + "fiona", + "networkx", +] dynamic = ["version"] [project.optional-dependencies] From 7e4b2992604a0a132222f26fa9f6f5773c896a5e Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Tue, 22 Oct 2024 13:25:49 +0200 Subject: [PATCH 22/23] Fix brabantse delta (#177) Co-authored-by: Martijn Visser --- .../brabantse_delta/01_fix_model_network.py | 174 ++++++++++++++++++ 1 file changed, 174 insertions(+) create mode 100644 notebooks/brabantse_delta/01_fix_model_network.py diff --git a/notebooks/brabantse_delta/01_fix_model_network.py b/notebooks/brabantse_delta/01_fix_model_network.py new file mode 100644 index 0000000..a6fffb7 --- /dev/null +++ b/notebooks/brabantse_delta/01_fix_model_network.py @@ -0,0 +1,174 @@ +# %% +import geopandas as gpd +import numpy as np +import pandas as pd +from ribasim import Node +from ribasim.nodes import basin, level_boundary, manning_resistance, outlet + +from ribasim_nl import CloudStorage, Model, 
NetworkValidator +from ribasim_nl.geometry import drop_z + +cloud = CloudStorage() + +authority = "BrabantseDelta" +short_name = "wbd" + +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_2024_6_3", f"{short_name}.toml") +database_gpkg = ribasim_toml.with_name("database.gpkg") + + +# %% read model +model = Model.read(ribasim_toml) +ribasim_toml = cloud.joinpath(authority, "modellen", f"{authority}_fix_model_network", f"{short_name}.toml") +network_validator = NetworkValidator(model) + + +hydroobject_gdf = gpd.read_file( + cloud.joinpath(authority, "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject", fid_as_index=True +) + + +# %% some stuff we'll need again +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +level_data = level_boundary.Static(level=[0]) + +basin_data = [ + basin.Profile(level=[0.0, 1.0], area=[0.01, 1000.0]), + basin.Static( + drainage=[0.0], + potential_evaporation=[0.001 / 86400], + infiltration=[0.0], + precipitation=[0.005 / 86400], + ), + basin.State(level=[0]), +] +outlet_data = outlet.Static(flow_rate=[100]) + + +# HIER KOMEN ISSUES + +# %% https://github.com/Deltares/Ribasim-NL/issues/152#issuecomment-2427492528 + +# Herstellen verbinding Schelde-Rijnverbinding met KDU02582 +model.remove_node(2288, remove_edges=True) +model.redirect_edge(edge_id=2450, to_node_id=955) + +# %% https://github.com/Deltares/Ribasim-NL/issues/152#issue-2535747701 +# Omkeren edges +edge_ids = [2470, 2468, 2469, 2465, 748, 2476, 2489, 697, 2500, 2487, 2440] + +for edge_id in edge_ids: + model.reverse_edge(edge_id=edge_id) + +# %% https://github.com/Deltares/Ribasim-NL/issues/152#issuecomment-2428677846 +# Toevoegen Donge +basin_node = model.basin.add( + Node(geometry=drop_z(hydroobject_gdf.at[13091, "geometry"].boundary.geoms[0])), tables=basin_data +) +outlet_node = model.outlet.add( + Node(geometry=drop_z(hydroobject_gdf.at[13136, "geometry"].boundary.geoms[0])), 
tables=[outlet_data] +) +model.redirect_edge(edge_id=2477, from_node_id=basin_node.node_id, to_node_id=973) +model.edge.add(basin_node, outlet_node) +model.edge.add(outlet_node, model.level_boundary[31]) + +outlet_node = model.outlet.add( + Node(geometry=drop_z(hydroobject_gdf.at[13088, "geometry"].boundary.geoms[0])), tables=[outlet_data] +) +model.redirect_edge(edge_id=2497, to_node_id=outlet_node.node_id) +model.redirect_edge(edge_id=2498, from_node_id=outlet_node.node_id, to_node_id=basin_node.node_id) + + +# EINDE ISSUES + + +# %% +# corrigeren knoop-topologie + +# ManningResistance bovenstrooms LevelBoundary naar Outlet +for row in network_validator.edge_incorrect_type_connectivity().itertuples(): + model.update_node(row.from_node_id, "Outlet", data=[outlet_data]) + +# Inlaten van ManningResistance naar Outlet +for row in network_validator.edge_incorrect_type_connectivity( + from_node_type="LevelBoundary", to_node_type="ManningResistance" +).itertuples(): + model.update_node(row.to_node_id, "Outlet", data=[outlet_data]) + + +## UPDATEN STATIC TABLES + +# %% +# basin-profielen/state updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.basin.node.df.index.to_numpy(), 2), + "level": [0.0, 1.0] * len(model.basin.node.df), + "area": [0.01, 1000.0] * len(model.basin.node.df), + } +) +df.index.name = "fid" +model.basin.profile.df = df + +df = model.basin.profile.df.groupby("node_id")[["level"]].max().reset_index() +df.index.name = "fid" +model.basin.state.df = df + +# %% +# tabulated_rating_curves updaten +df = pd.DataFrame( + { + "node_id": np.repeat(model.tabulated_rating_curve.node.df.index.to_numpy(), 2), + "level": [0.0, 5] * len(model.tabulated_rating_curve.node.df), + "flow_rate": [0, 0.1] * len(model.tabulated_rating_curve.node.df), + } +) +df.index.name = "fid" +model.tabulated_rating_curve.static.df = df + + +# %% + +# level_boundaries updaten +df = pd.DataFrame( + { + "node_id": model.level_boundary.node.df.index.to_list(), + "level": [0.0] * 
len(model.level_boundary.node.df), + } +) +df.index.name = "fid" +model.level_boundary.static.df = df + +# %% +# manning_resistance updaten +length = len(model.manning_resistance.node.df) +df = pd.DataFrame( + { + "node_id": model.manning_resistance.node.df.index.to_list(), + "length": [100.0] * length, + "manning_n": [100.0] * length, + "profile_width": [100.0] * length, + "profile_slope": [100.0] * length, + } +) +df.index.name = "fid" +model.manning_resistance.static.df = df + +# %% +# flow boundaries updaten +length = len(model.flow_boundary.node.df) +df = pd.DataFrame( + { + "node_id": model.flow_boundary.node.df.index.to_list(), + "flow_rate": [0.0] * length, + } +) +df.index.name = "fid" +model.flow_boundary.static.df = df + + +# %% write model +model.use_validation = True +model.write(ribasim_toml) + +# %% From 2559c7ec3493eadddb3561ec93442099a72e8d40 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:57:42 +0100 Subject: [PATCH 23/23] Update pre-commit hooks (#183) Update versions of pre-commit hooks to latest version. Co-authored-by: GitHub --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dc5a4fe..7393882 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: exclude: '.teamcity' - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.9 + rev: v0.7.1 hooks: - id: ruff types_or: [python, pyi, jupyter]