Skip to content

Commit

Permalink
calculate storage from Basin / profile (#280)
Browse files Browse the repository at this point in the history
Fixes #225

This removes the need to provide the storage for a profile. We can
calculate it ourselves by integrating over A(h) to get A(S) and h(S).

First this updates the test models to a correct storage in the profile.
Then we calculate the profile and get the same results. Then we remove
the storage from the schemas and docs.

I hoped this would help with #80 but based on the plot I added there it
doesn't seem to. This does not yet add all the validation and
extrapolation for lookup tables as described in #279, that can be done
separately.
  • Loading branch information
visr authored Jun 14, 2023
1 parent fe2b09d commit fb54b00
Show file tree
Hide file tree
Showing 16 changed files with 87 additions and 68 deletions.
2 changes: 1 addition & 1 deletion .github/pull_request_template.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Solves issue #?
Fixes #

# Description

Expand Down
2 changes: 1 addition & 1 deletion core/src/Ribasim.jl
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@ TimerOutputs.complement!()

include("validation.jl")
include("solve.jl")
include("utils.jl")
include("config.jl")
include("utils.jl")
include("lib.jl")
include("io.jl")
include("create.jl")
Expand Down
23 changes: 23 additions & 0 deletions core/src/config.jl
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,29 @@ for sv in nodeschemas
push!(nodekinds[node], kind)
end

"""
Add fieldnames with Maybe{String} type to struct expression. Requires @option use before it.
"""
macro addfields(typ::Expr, fieldnames)
    # typ.args[3] is the struct body block; append one field per name
    body = typ.args[3].args
    for fname in fieldnames
        # builds `fname::Maybe{String} = nothing`
        field = Expr(:(=), Expr(:(::), fname, Maybe{String}), nothing)
        push!(body, field)
    end
    return esc(typ)
end

"""
Add all TableOption subtypes as fields to struct expression. Requires @option use before it.
"""
macro addnodetypes(typ::Expr)
    # typ.args[3] is the struct body block
    body = typ.args[3].args
    for nt in nodetypes
        # builds `nt::nt = nt()`: a field named after the node type, typed as
        # that node type, defaulting to its zero-argument constructor
        push!(body, Expr(:(=), Expr(:(::), nt, nt), Expr(:call, nt)))
    end
    return esc(typ)
end

# Generate structs for each nodetype for use in Config
abstract type TableOption end
for (T, kinds) in pairs(nodekinds)
Expand Down
16 changes: 0 additions & 16 deletions core/src/create.jl
Original file line number Diff line number Diff line change
Expand Up @@ -52,22 +52,6 @@ function ManningResistance(db::DB, config::Config)::ManningResistance
)
end

"Read the Basin / profile table and build per-basin A(S) and h(S) interpolations."
function create_storage_tables(db::DB, config::Config)
    profiles = load_structvector(db, config, BasinProfileV1)
    area = Interpolation[]
    level = Interpolation[]
    # one profile group per basin node_id; rows are assumed sorted by node_id
    for group in IterTools.groupby(row -> row.node_id, profiles)
        storages = [row.storage for row in group]
        areas = [row.area for row in group]
        levels = [row.level for row in group]
        # both lookups are keyed on storage
        push!(area, LinearInterpolation(areas, storages))
        push!(level, LinearInterpolation(levels, storages))
    end
    return area, level
end

function FractionalFlow(db::DB, config::Config)::FractionalFlow
static = load_structvector(db, config, FractionalFlowStaticV1)
return FractionalFlow(static.node_id, static.fraction)
Expand Down
46 changes: 29 additions & 17 deletions core/src/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,27 +19,39 @@ function create_graph(
return graph, edge_ids, edge_connection_types
end

"Calculate a profile storage by integrating the areas over the levels"
function profile_storage(levels::Vector{Float64}, areas::Vector{Float64})::Vector{Float64}
    # profile starts at the bottom; first storage is 0
    storages = zero(areas)
    n = length(storages)

    for i in 2:n
        # trapezoidal rule on each level interval: ΔS = mean area * Δh
        Δh = levels[i] - levels[i - 1]
        avg_area = 0.5 * (areas[i - 1] + areas[i])
        ΔS = avg_area * Δh
        storages[i] = storages[i - 1] + ΔS
    end
    return storages
end

"Read the Basin / profile table and return all A(S) and h(S) functions"
function create_storage_tables(
    db::DB,
    config::Config,
)::Tuple{Vector{Interpolation}, Vector{Interpolation}}
    profiles = load_structvector(db, config, BasinProfileV1)
    area = Interpolation[]
    level = Interpolation[]
    # one profile group per basin node_id; rows are assumed sorted by node_id
    for group in IterTools.groupby(row -> row.node_id, profiles)
        group_area = getproperty.(group, :area)
        group_level = getproperty.(group, :level)
        # storage is no longer provided in the schema; derive it from A(h)
        group_storage = profile_storage(group_level, group_area)
        # both lookups are keyed on the derived storage
        area_itp = LinearInterpolation(group_area, group_storage)
        level_itp = LinearInterpolation(group_level, group_storage)
        push!(area, area_itp)
        push!(level, level_itp)
    end
    return area, level
end

"""
Expand Down
9 changes: 5 additions & 4 deletions core/src/validation.jl
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,6 @@ end

@version BasinProfileV1 begin
node_id::Int
storage::Float64
area::Float64
level::Float64
end
Expand Down Expand Up @@ -195,19 +194,21 @@ end
sort_by_id(row) = row.node_id
sort_by_time_id(row) = (row.time, row.node_id)
sort_by_id_level(row) = (row.node_id, row.level)
sort_by_id_storage(row) = (row.node_id, row.storage)

# get the right sort by function given the Schema, with sort_by_id as the default
sort_by_function(table::StructVector{<:Legolas.AbstractRecord}) = sort_by_id
sort_by_function(table::StructVector{TabulatedRatingCurveStaticV1}) = sort_by_id_level
sort_by_function(table::StructVector{BasinProfileV1}) = sort_by_id_storage

const TimeSchemas = Union{TabulatedRatingCurveTimeV1, BasinForcingV1}
const LevelLookupSchemas = Union{TabulatedRatingCurveStaticV1, BasinProfileV1}

function sort_by_function(table::StructVector{<:TimeSchemas})
return sort_by_time_id
end

function sort_by_function(table::StructVector{<:LevelLookupSchemas})
return sort_by_id_level
end

"""
Depending on if a table can be sorted, either sort it or assert that it is sorted.
Expand Down
6 changes: 3 additions & 3 deletions core/test/basin.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ using SciMLBase
model = Ribasim.run(toml_path)
@test model isa Ribasim.Model
@test model.integrator.sol.retcode == Ribasim.ReturnCode.Success
@test model.integrator.sol.u[end] ≈ Float32[948.93713, 948.96857, 1.8637465, 1573.5077] skip =
@test model.integrator.sol.u[end] ≈ Float32[476.8749, 476.85898, 1.8706497, 786.96686] skip =
Sys.isapple()
end

Expand All @@ -20,7 +20,7 @@ end
@test model isa Ribasim.Model
@test model.integrator.sol.retcode == Ribasim.ReturnCode.Success
@test length(model.integrator.p.basin.precipitation) == 4
@test model.integrator.sol.u[end] ≈ Float32[915.72656, 915.701, 1.3522862, 1583.116] skip =
@test model.integrator.sol.u[end] ≈ Float32[480.5936, 480.58762, 1.4178488, 793.6097] skip =
Sys.isapple()
end

Expand All @@ -31,7 +31,7 @@ end
model = Ribasim.run(toml_path)
@test model isa Ribasim.Model
@test model.integrator.sol.retcode == Ribasim.ReturnCode.Success
@test model.integrator.sol.u[end] ≈ Float32[54.459435, 313.50992] skip = Sys.isapple()
@test model.integrator.sol.u[end] ≈ Float32[27.273159, 340.68964] skip = Sys.isapple()
# the highest level in the dynamic table is updated to 1.2 from the callback
@test model.integrator.p.tabulated_rating_curve.tables[end].t[end] == 1.2
end
7 changes: 7 additions & 0 deletions core/test/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,10 @@ using Test
@test Ribasim.id_index(ids, 4) === (true, 2)
@test Ribasim.id_index(ids, 5) === (false, 0)
end

@testset "profile_storage" begin
    # (levels, areas) => expected storages from trapezoidal integration
    cases = [
        ([0.0, 1.0], [0.0, 1000.0]) => [0.0, 500.0],
        # only level differences matter, not the absolute datum
        ([6.0, 7.0], [0.0, 1000.0]) => [0.0, 500.0],
        # a constant-area segment contributes area * Δh
        ([6.0, 7.0, 9.0], [0.0, 1000.0, 1000.0]) => [0.0, 500.0, 2500.0],
    ]
    for ((levels, areas), expected) in cases
        @test Ribasim.profile_storage(levels, areas) == expected
    end
end
2 changes: 1 addition & 1 deletion docs/contribute/addnode.qmd
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ Future: Test for the specific behaviour of each node type
There are several parts of the documentation which should be updated with the new node type:

- `docs/core/equations` should contain a short explanation and if possible an analytical expression for the behaviour of the new node;
- `docs/core/usage.qmd` should conntain a short explanation of the node and the possible schemas associated with it;
- `docs/core/usage.qmd` should contain a short explanation of the node and the possible schemas associated with it;
- The example models constructed in `docs/python/examples.ipynb` should be extended with the new node type or a new example model with the new node type should be made.

# Finishing up
Expand Down
22 changes: 12 additions & 10 deletions docs/core/usage.qmd
Original file line number Diff line number Diff line change
Expand Up @@ -235,22 +235,24 @@ The profile table defines the physical dimensions of the storage reservoir of ea
column | type | unit | restriction
--------- | ------- | ------------ | -----------
node_id | Int | - | sorted
storage | Float64 | $m^3$ | non-negative, per node_id: start at 0 and increasing
area | Float64 | $m^2$ | non-negative
level | Float64 | $m$ | -
area | Float64 | $m^2$ | non-negative, per node_id: start at 0 and increasing
level | Float64 | $m$ | per node_id: increasing

The level is the level at the basin outlet. All levels are defined in meters above a datum
that is the same for the entire model. An example of the first 5 rows of such a table is
given below. The first 4 rows define the profile of ID `2`. The number of rows can vary
per ID. Using a very large number of rows may impact performance.

node_id | storage | area | level
------- |--------------- |-------------- |-------
2 | 0.0 | 1.36404e5 | -0.105
2 | 24726.2 | 1.36404e5 | 0.095
2 | 49452.5 | 1.36404e5 | 0.295
2 | 2.49735e6 | 1.36404e5 | 20.095
3 | 0.0 | 50663.3 | 2.129
node_id | area | level
------- |------- |-------
2 | 0.0 | 6.0
2 | 1000.0 | 7.0
2 | 1000.0 | 9.0
3 | 0.0 | 2.2

We use the symbol $A$ for area, $h$ for level and $S$ for storage.
The profile provides a function $A(h)$ for each basin.
Internally this gets converted to two functions, $A(S)$ and $h(S)$, by integrating over the function, setting the storage to zero for the bottom of the profile.

### FractionalFlow

Expand Down
4 changes: 2 additions & 2 deletions docs/python/examples.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@
"profile = pd.DataFrame(\n",
" data={\n",
" \"node_id\": [1, 1, 3, 3, 6, 6, 9, 9],\n",
" \"storage\": [0.0, 1000.0] * 4,\n",
" \"storage\": [0.0, 500.0] * 4,\n",
" \"area\": [0.0, 1000.0] * 4,\n",
" \"level\": [0.0, 1.0] * 4,\n",
" }\n",
Expand Down Expand Up @@ -1000,7 +1000,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
"version": "3.10.11"
}
},
"nbformat": 4,
Expand Down
6 changes: 0 additions & 6 deletions docs/schema/BasinProfile.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,6 @@
"description": "area",
"type": "number"
},
"storage": {
"format": "double",
"description": "storage",
"type": "number"
},
"node_id": {
"format": "default",
"description": "node_id",
Expand All @@ -30,7 +25,6 @@
},
"required": [
"node_id",
"storage",
"area",
"level"
],
Expand Down
1 change: 0 additions & 1 deletion python/ribasim/ribasim/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,6 @@ class BasinState(BaseModel):
class BasinProfile(BaseModel):
    # Pydantic schema for a Basin / profile table row.
    # NOTE: the former `storage` column is intentionally absent; per this
    # commit, storage is derived by integrating area over level.
    remarks: Optional[str] = Field("", description="a hack for pandera")
    area: float = Field(..., description="area")
    node_id: int = Field(..., description="node_id")
    level: float = Field(..., description="level")

Expand Down
4 changes: 1 addition & 3 deletions python/ribasim/ribasim/node_types/basin.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,7 @@ class Config:
validate_assignment = True

def sort(self):
self.profile = self.profile.sort_values(
["node_id", "storage"], ignore_index=True
)
self.profile = self.profile.sort_values(["node_id", "level"], ignore_index=True)
if self.static is not None:
self.static = self.static.sort_values("node_id", ignore_index=True)
if self.forcing is not None:
Expand Down
4 changes: 2 additions & 2 deletions python/ribasim_testmodels/ribasim_testmodels/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ def basic_model() -> ribasim.Model:
profile = pd.DataFrame(
data={
"node_id": [1, 1, 3, 3, 6, 6, 9, 9],
"storage": [0.0, 1000.0] * 4,
"storage": [0.0, 500.0] * 4,
"area": [0.0, 1000.0] * 4,
"level": [0.0, 1.0] * 4,
}
Expand Down Expand Up @@ -323,7 +323,7 @@ def tabulated_rating_curve_model() -> ribasim.Model:
profile = pd.DataFrame(
data={
"node_id": [1, 1, 4, 4],
"storage": [0.0, 1000.0] * 2,
"storage": [0.0, 500.0] * 2,
"area": [0.0, 1000.0] * 2,
"level": [0.0, 1.0] * 2,
}
Expand Down
1 change: 0 additions & 1 deletion qgis/core/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,6 @@ class BasinProfile(Input):
geometry_type = "No Geometry"
attributes = [
QgsField("node_id", QVariant.Int),
QgsField("storage", QVariant.Double),
QgsField("area", QVariant.Double),
QgsField("level", QVariant.Double),
]
Expand Down

0 comments on commit fb54b00

Please sign in to comment.