Merge branch 'main' into equal_ratio_allocation_objective
SouthEndMusic committed Apr 10, 2024
2 parents 5996b96 + 35dfc2c commit a56f844
Showing 17 changed files with 185 additions and 58 deletions.
4 changes: 2 additions & 2 deletions core/src/bmi.jl
@@ -31,7 +31,7 @@ function BMI.update_until(model::Model, time)::Model
return model
end

function BMI.get_value_ptr(model::Model, name::AbstractString)
function BMI.get_value_ptr(model::Model, name::AbstractString)::AbstractVector{Float64}
if name == "basin.storage"
model.integrator.u.storage
elseif name == "basin.level"
@@ -47,7 +47,7 @@ function BMI.get_value_ptr(model::Model, name::AbstractString)
elseif name == "basin.subgrid_level"
model.integrator.p.subgrid.level
elseif name == "user_demand.demand"
model.integrator.p.user_demand.demand
vec(model.integrator.p.user_demand.demand)
elseif name == "user_demand.realized"
model.integrator.p.user_demand.realized_bmi
else
33 changes: 29 additions & 4 deletions core/src/callback.jl
@@ -69,8 +69,8 @@ function create_callbacks(
SavingCallback(save_vertical_flux, saved_vertical_flux; saveat, save_start = false)
push!(callbacks, save_vertical_flux_cb)

# save the flows over time, as a Vector of the nonzeros(flow)
saved_flow = SavedValues(Float64, Vector{Float64})
# save the flows over time
saved_flow = SavedValues(Float64, SavedFlow)
save_flow_cb = SavingCallback(save_flow, saved_flow; saveat, save_start = false)
push!(callbacks, save_flow_cb)

@@ -138,14 +138,39 @@ end
"Compute the average flows over the last saveat interval and write
them to SavedValues"
function save_flow(u, t, integrator)
(; flow_integrated) = integrator.p.graph[]
(; graph) = integrator.p
(; flow_integrated, flow_dict) = graph[]
(; node_id) = integrator.p.basin

Δt = get_Δt(integrator)
flow_mean = copy(flow_integrated)
flow_mean ./= Δt
fill!(flow_integrated, 0.0)

return flow_mean
# Divide the flows over edges to Basin inflow and outflow, regardless of edge direction.
inflow_mean = zeros(length(node_id))
outflow_mean = zeros(length(node_id))

for (i, basin_id) in enumerate(node_id)
for in_id in inflow_ids(graph, basin_id)
q = flow_mean[flow_dict[in_id, basin_id]]
if q > 0
inflow_mean[i] += q
else
outflow_mean[i] -= q
end
end
for out_id in outflow_ids(graph, basin_id)
q = flow_mean[flow_dict[basin_id, out_id]]
if q > 0
outflow_mean[i] += q
else
inflow_mean[i] -= q
end
end
end

return SavedFlow(; flow = flow_mean, inflow = inflow_mean, outflow = outflow_mean)
end
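
The loop above counts a positive mean flow on an edge directed into a Basin as inflow and a negative one as outflow, and mirrors that for edges directed out of the Basin. A minimal standalone sketch of this sign convention, with hypothetical numbers (not part of the commit):

```julia
# Hypothetical mean flows over the last saveat interval for a single Basin:
inflow_edge_flows = [0.3, -0.1]    # flows on edges directed into the Basin
outflow_edge_flows = [0.2, -0.05]  # flows on edges directed out of the Basin

# A negative flow on an inflow edge is water leaving the Basin, and vice versa.
inflow = sum(max.(inflow_edge_flows, 0.0)) + sum(max.(-outflow_edge_flows, 0.0))
outflow = sum(max.(outflow_edge_flows, 0.0)) + sum(max.(-inflow_edge_flows, 0.0))
# inflow == 0.35, outflow == 0.3
```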

"Compute the average vertical fluxes over the last saveat interval and write
2 changes: 1 addition & 1 deletion core/src/model.jl
@@ -1,5 +1,5 @@
struct SavedResults{V1 <: ComponentVector{Float64}}
flow::SavedValues{Float64, Vector{Float64}}
flow::SavedValues{Float64, SavedFlow}
vertical_flux::SavedValues{Float64, V1}
subgrid_level::SavedValues{Float64, Vector{Float64}}
end
13 changes: 13 additions & 0 deletions core/src/parameter.jl
@@ -134,6 +134,19 @@ end

abstract type AbstractParameterNode end

"""
In-memory storage of saved mean flows for writing to results.
- `flow`: The mean flows on all edges
- `inflow`: The sum of the mean flows coming into each basin
- `outflow`: The sum of the mean flows going out of each basin
"""
@kwdef struct SavedFlow
flow::Vector{Float64}
inflow::Vector{Float64}
outflow::Vector{Float64}
end

"""
Requirements:
11 changes: 10 additions & 1 deletion core/src/read.jl
@@ -1009,13 +1009,22 @@ function exists(db::DB, tablename::String)
return !isempty(query)
end

"""
seconds(period::Millisecond)::Float64
Convert a period of type Millisecond to a Float64 in seconds.
You get Millisecond objects when subtracting two DateTime objects.
Dates.value returns the number of milliseconds.
"""
seconds(period::Millisecond)::Float64 = 0.001 * Dates.value(period)

"""
seconds_since(t::DateTime, t0::DateTime)::Float64
Convert a DateTime to a float that is the number of seconds since the start of the
simulation. This is used to convert between the solver's inner float time, and the calendar.
"""
seconds_since(t::DateTime, t0::DateTime)::Float64 = 0.001 * Dates.value(t - t0)
seconds_since(t::DateTime, t0::DateTime)::Float64 = seconds(t - t0)
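
As a quick sanity check of these helpers (a sketch, not part of the commit; uses the standard library `Dates`):

```julia
using Dates

t0 = DateTime(2020, 1, 1)
t = DateTime(2020, 1, 2)

t - t0                # 86400000 milliseconds
seconds(t - t0)       # 86400.0
seconds_since(t, t0)  # 86400.0
```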

"""
datetime_since(t::Real, t0::DateTime)::DateTime
3 changes: 3 additions & 0 deletions core/src/util.jl
@@ -534,6 +534,9 @@ function Base.getindex(fv::FlatVector, i::Int)
return v[r + 1]
end

"Construct a FlatVector from one of the fields of SavedFlow."
FlatVector(saveval::Vector{SavedFlow}, sym::Symbol) = FlatVector(getfield.(saveval, sym))
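
A hypothetical illustration of how this constructor is meant to be used (values invented; compare `basin_table` and `flow_table` in `write.jl` below): one `SavedFlow` per saved timestep, with the chosen field read as one flat vector across timesteps.

```julia
# Two saved timesteps, two Basins each (hypothetical values).
saveval = [
    SavedFlow(; flow = [1.0, 2.0], inflow = [0.2, 0.3], outflow = [0.1, 0.4]),
    SavedFlow(; flow = [1.5, 2.5], inflow = [0.5, 0.6], outflow = [0.0, 0.7]),
]

inflow_flat = FlatVector(saveval, :inflow)
# Reads like vcat(saveval[1].inflow, saveval[2].inflow): 0.2, 0.3, 0.5, 0.6
```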

"""
Function that goes smoothly from 0 to 1 in the interval [0,threshold],
and is constant outside this interval.
41 changes: 35 additions & 6 deletions core/src/write.jl
@@ -84,33 +84,40 @@ function basin_table(
node_id::Vector{Int32},
storage::Vector{Float64},
level::Vector{Float64},
inflow_rate::Vector{Float64},
outflow_rate::Vector{Float64},
storage_rate::Vector{Float64},
precipitation::Vector{Float64},
evaporation::Vector{Float64},
drainage::Vector{Float64},
infiltration::Vector{Float64},
balance_error::Vector{Float64},
relative_error::Vector{Float64},
}
(; saved) = model
(; vertical_flux) = saved

# The last timestep is not included; there is no period over which to compute flows.
data = get_storages_and_levels(model)
storage = vec(data.storage[:, begin:(end - 1)])
level = vec(data.level[:, begin:(end - 1)])
Δstorage = vec(diff(data.storage; dims = 2))

nbasin = length(data.node_id)
ntsteps = length(data.time) - 1
nrows = nbasin * ntsteps

inflow_rate = FlatVector(saved.flow.saveval, :inflow)
outflow_rate = FlatVector(saved.flow.saveval, :outflow)
precipitation = zeros(nrows)
evaporation = zeros(nrows)
drainage = zeros(nrows)
infiltration = zeros(nrows)
balance_error = zeros(nrows)
relative_error = zeros(nrows)

idx_row = 0

for vec in vertical_flux.saveval
for cvec in saved.vertical_flux.saveval
for (precipitation_, evaporation_, drainage_, infiltration_) in
zip(vec.precipitation, vec.evaporation, vec.drainage, vec.infiltration)
zip(cvec.precipitation, cvec.evaporation, cvec.drainage, cvec.infiltration)
idx_row += 1
precipitation[idx_row] = precipitation_
evaporation[idx_row] = evaporation_
@@ -120,17 +127,39 @@
end

time = repeat(data.time[begin:(end - 1)]; inner = nbasin)
Δtime_seconds = seconds.(diff(data.time))
Δtime = repeat(Δtime_seconds; inner = nbasin)
node_id = repeat(Int32.(data.node_id); outer = ntsteps)
storage_rate = Δstorage ./ Δtime

for i in 1:nrows
storage_flow = storage_rate[i]
storage_increase = max(storage_flow, 0.0)
storage_decrease = max(-storage_flow, 0.0)

total_in = inflow_rate[i] + precipitation[i] + drainage[i] - storage_increase
total_out = outflow_rate[i] + evaporation[i] + infiltration[i] - storage_decrease
balance_error[i] = total_in - total_out
mean_flow_rate = 0.5 * (total_in + total_out)
if mean_flow_rate != 0
relative_error[i] = balance_error[i] / mean_flow_rate
end
end

return (;
time,
node_id,
storage,
level,
inflow_rate,
outflow_rate,
storage_rate,
precipitation,
evaporation,
drainage,
infiltration,
balance_error,
relative_error,
)
end

@@ -184,7 +213,7 @@ function flow_table(
from_node_id = repeat(from_node_id; outer = ntsteps)
to_node_type = repeat(to_node_type; outer = ntsteps)
to_node_id = repeat(to_node_id; outer = ntsteps)
flow_rate = FlatVector(saveval)
flow_rate = FlatVector(saveval, :flow)

return (;
time,
2 changes: 1 addition & 1 deletion core/test/bmi_test.jl
@@ -73,7 +73,7 @@ end
BMI.update_until(model, 86400.0)
value_second = BMI.get_value_ptr(model, name)
# get_value_ptr does not copy
@test value_first === value_second
@test value_first === value_second || pointer(value_first) == pointer(value_second)
end
end
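
The relaxed check is needed because `get_value_ptr` now returns `vec(model.integrator.p.user_demand.demand)` for "user_demand.demand": `vec` on a `Matrix` produces a new `Array` wrapper on every call, so `===` can fail even though the underlying memory is shared. A minimal sketch of that behavior (hypothetical array, not from the repository):

```julia
demand = zeros(3, 2)      # hypothetical (user, priority) demand matrix
a = vec(demand)
b = vec(demand)

a === b                   # false: two distinct wrapper objects
pointer(a) == pointer(b)  # true: both share the matrix's data
a[1] = 4.2
demand[1, 1] == 4.2       # true: no copy was made
```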

35 changes: 32 additions & 3 deletions core/test/run_models_test.jl
@@ -8,7 +8,8 @@

toml_path = normpath(@__DIR__, "../../generated_testmodels/trivial/ribasim.toml")
@test ispath(toml_path)
model = Ribasim.run(toml_path)
config = Ribasim.Config(toml_path)
model = Ribasim.run(config)
@test model isa Ribasim.Model
@test successful_retcode(model)
(; p) = model.integrator
@@ -49,12 +50,31 @@
:node_id,
:storage,
:level,
:inflow_rate,
:outflow_rate,
:storage_rate,
:precipitation,
:evaporation,
:drainage,
:infiltration,
:balance_error,
:relative_error,
),
(
DateTime,
Int32,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
),
(DateTime, Int32, Float64, Float64, Float64, Float64, Float64, Float64),
)
@test Tables.schema(control) == Tables.Schema(
(:time, :control_node_id, :truth_state, :control_state),
@@ -107,12 +127,21 @@

@testset "Results values" begin
@test flow.time[1] == DateTime(2020)
@test coalesce.(flow.edge_id[1:2], -1) == [1, 2]
@test coalesce.(flow.edge_id[1:2], -1) == [0, 1]
@test flow.from_node_id[1:2] == [6, 0]
@test flow.to_node_id[1:2] == [0, 922]

@test basin.storage[1] ≈ 1.0
@test basin.level[1] ≈ 0.044711584
@test basin.storage_rate[1] ≈
      (basin.storage[2] - basin.storage[1]) / config.solver.saveat
@test all(==(0), basin.inflow_rate)
@test all(>(0), basin.outflow_rate)
@test flow.flow_rate[1] == basin.outflow_rate[1]
@test all(==(0), basin.drainage)
@test all(==(0), basin.infiltration)
@test all(q -> abs(q) < 1e-7, basin.balance_error)
@test all(q -> abs(q) < 0.01, basin.relative_error)

# The exporter interpolates 1:1 for three subgrid elements, but shifted by 1.0 meter.
basin_level = basin.level[1]
4 changes: 2 additions & 2 deletions core/test/validation_test.jl
@@ -361,10 +361,10 @@ end
@test length(logger.logs) == 2
@test logger.logs[1].level == Error
@test logger.logs[1].message ==
"Invalid edge type 'foo' for edge #1 from node #1 to node #2."
"Invalid edge type 'foo' for edge #0 from node #1 to node #2."
@test logger.logs[2].level == Error
@test logger.logs[2].message ==
"Invalid edge type 'bar' for edge #2 from node #2 to node #3."
"Invalid edge type 'bar' for edge #1 from node #2 to node #3."
end

@testitem "Subgrid validation" begin
44 changes: 27 additions & 17 deletions docs/core/usage.qmd
@@ -707,23 +707,33 @@ derivative | Float64 | - | -

The Basin table contains:

- Results of the storage and level of each basin, which are instantaneous values;
- Results of the vertical fluxes on each basin, which are mean values over the `saveat` intervals. In the time column the start of the period is indicated.
- The final state of the model is not part of this file, and will be placed in a separate output state file in the future.

The initial condition is also written to the file.

column | type | unit
------------- | ---------| ----
time | DateTime | -
node_id | Int32 | -
storage | Float64 | $m^3$
level | Float64 | $m$
precipitation | Float64 | $m^3 s^{-1}$
evaporation | Float64 | $m^3 s^{-1}$
drainage | Float64 | $m^3 s^{-1}$
infiltration | Float64 | $m^3 s^{-1}$

- Results of the storage and level of each Basin, which are instantaneous values;
- Results of the fluxes on each Basin, which are mean values over the `saveat` intervals.
In the time column the start of the period is indicated.
- The initial condition is written to the file, but the final state is not.
It will be placed in a separate output state file in the future.
- The `inflow_rate` and `outflow_rate` are the sums of the flows from other nodes into and out of the Basin respectively.
Whether a flow counts as inflow or outflow is determined by its actual direction, not by the edge direction.
- The `storage_rate` is the rate of change of the Basin storage, i.e. the net flow into storage. In the equations below this rate is split into two non-negative numbers, `storage_increase` and `storage_decrease`.
- The `balance_error` is the difference between the total Basin inflow (`total_inflow`) and the total outflow (`total_outflow`), that is (`inflow_rate` + `precipitation` + `drainage` - `storage_increase`) - (`outflow_rate` + `evaporation` + `infiltration` - `storage_decrease`), written out in the formulas below this list.
It can be used to check whether the numerical error made when solving the water balance is sufficiently small.
- The `relative_error` is the `balance_error` divided by the mean of `total_inflow` and `total_outflow`.
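
In formula form (restating the last two bullets, using the same terms):

$$
\begin{aligned}
\text{balance\_error} = {}& (\text{inflow\_rate} + \text{precipitation} + \text{drainage} - \text{storage\_increase}) \\
&- (\text{outflow\_rate} + \text{evaporation} + \text{infiltration} - \text{storage\_decrease}), \\
\text{relative\_error} = {}& \frac{\text{balance\_error}}{\tfrac{1}{2}\,(\text{total\_inflow} + \text{total\_outflow})}
\end{aligned}
$$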

column | type | unit
-------------- | ---------| ----
time | DateTime | -
node_id | Int32 | -
storage | Float64 | $m^3$
level | Float64 | $m$
inflow_rate | Float64 | $m^3 s^{-1}$
outflow_rate | Float64 | $m^3 s^{-1}$
storage_rate | Float64 | $m^3 s^{-1}$
precipitation | Float64 | $m^3 s^{-1}$
evaporation | Float64 | $m^3 s^{-1}$
drainage | Float64 | $m^3 s^{-1}$
infiltration | Float64 | $m^3 s^{-1}$
balance_error | Float64 | $m^3 s^{-1}$
relative_error | Float64 | -

The table is sorted by time, and per time it is sorted by `node_id`.

17 changes: 8 additions & 9 deletions python/ribasim/ribasim/geometry/edge.py
@@ -76,21 +76,20 @@ def add(
crs=self.df.crs,
)

self.df = GeoDataFrame[EdgeSchema](pd.concat([self.df, table_to_append]))
self.df = GeoDataFrame[EdgeSchema](
pd.concat([self.df, table_to_append], ignore_index=True)
)
self.df.index.name = "fid"

def get_where_edge_type(self, edge_type: str) -> NDArray[np.bool_]:
assert self.df is not None
return (self.df.edge_type == edge_type).to_numpy()

def sort(self):
assert self.df is not None
sort_keys = [
"from_node_type",
"from_node_id",
"to_node_type",
"to_node_id",
]
self.df.sort_values(sort_keys, ignore_index=True, inplace=True)
# Only sort the index (fid / edge_id) since this needs to be sorted in a GeoPackage.
# Under most circumstances, this retains the input order,
# making the edge_id as stable as possible; useful for post-processing.
self.df.sort_index(inplace=True)

def plot(self, **kwargs) -> Axes:
assert self.df is not None