calculate storage from Basin / profile #280

Merged
merged 4 commits on Jun 14, 2023
2 changes: 1 addition & 1 deletion .github/pull_request_template.md
@@ -1,4 +1,4 @@
Solves issue #?
Fixes #

# Description

2 changes: 1 addition & 1 deletion core/src/Ribasim.jl
@@ -27,8 +27,8 @@ TimerOutputs.complement!()

include("validation.jl")
include("solve.jl")
include("utils.jl")
include("config.jl")
include("utils.jl")
include("lib.jl")
include("io.jl")
include("create.jl")
23 changes: 23 additions & 0 deletions core/src/config.jl
@@ -12,6 +12,29 @@ for sv in nodeschemas
push!(nodekinds[node], kind)
end

"""
Add fieldnames with Maybe{String} type to struct expression. Requires @option use before it.
"""
macro addfields(typ::Expr, fieldnames)
for fieldname in fieldnames
push!(typ.args[3].args, Expr(:(=), Expr(:(::), fieldname, Maybe{String}), nothing))
end
return esc(typ)
end

"""
Add all TableOption subtypes as fields to struct expression. Requires @option use before it.
"""
macro addnodetypes(typ::Expr)
for nodetype in nodetypes
push!(
typ.args[3].args,
Expr(:(=), Expr(:(::), nodetype, nodetype), Expr(:call, nodetype)),
)
end
return esc(typ)
end
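
# Sketch of the intended expansion (hypothetical fields; the real node types
# are generated from `nodekinds` below). Assuming `nodetypes` contains :Basin
# and :Pump, a definition like
#
#     @option @addnodetypes struct Config
#         starttime::DateTime
#     end
#
# would expand to roughly
#
#     @option struct Config
#         starttime::DateTime
#         Basin::Basin = Basin()
#         Pump::Pump = Pump()
#     end
#
# and `@addfields` likewise appends `name::Maybe{String} = nothing` for each
# given field name.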

# Generate structs for each nodetype for use in Config
abstract type TableOption end
for (T, kinds) in pairs(nodekinds)
16 changes: 0 additions & 16 deletions core/src/create.jl
@@ -52,22 +52,6 @@ function ManningResistance(db::DB, config::Config)::ManningResistance
)
end

function create_storage_tables(db::DB, config::Config)
profiles = load_structvector(db, config, BasinProfileV1)
area = Interpolation[]
level = Interpolation[]
for group in IterTools.groupby(row -> row.node_id, profiles)
group_storage = getproperty.(group, :storage)
group_area = getproperty.(group, :area)
group_level = getproperty.(group, :level)
area_itp = LinearInterpolation(group_area, group_storage)
level_itp = LinearInterpolation(group_level, group_storage)
push!(area, area_itp)
push!(level, level_itp)
end
return area, level
end

function FractionalFlow(db::DB, config::Config)::FractionalFlow
static = load_structvector(db, config, FractionalFlowStaticV1)
return FractionalFlow(static.node_id, static.fraction)
46 changes: 29 additions & 17 deletions core/src/utils.jl
@@ -16,27 +16,39 @@ function create_graph(
return graph, edge_ids, edge_types
end

"""
Add fieldnames with Maybe{String} type to struct expression. Requires @option use before it.
"""
macro addfields(typ::Expr, fieldnames)
for fieldname in fieldnames
push!(typ.args[3].args, Expr(:(=), Expr(:(::), fieldname, Maybe{String}), nothing))
"Calculate a profile storage by integrating the areas over the levels"
function profile_storage(levels::Vector{Float64}, areas::Vector{Float64})::Vector{Float64}
# profile starts at the bottom; first storage is 0
storages = zero(areas)
n = length(storages)

for i in 2:n
Δh = levels[i] - levels[i - 1]
avg_area = 0.5 * (areas[i - 1] + areas[i])
ΔS = avg_area * Δh
storages[i] = storages[i - 1] + ΔS
end
return esc(typ)
return storages
end
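
# Worked example, matching the unit tests: profile_storage([6.0, 7.0, 9.0],
# [0.0, 1000.0, 1000.0]) returns [0.0, 500.0, 2500.0], since
#     0.5 * (0.0 + 1000.0) * (7.0 - 6.0) = 500.0
#     500.0 + 0.5 * (1000.0 + 1000.0) * (9.0 - 7.0) = 2500.0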

"""
Add all TableOption subtypes as fields to struct expression. Requires @option use before it.
"""
macro addnodetypes(typ::Expr)
for nodetype in nodetypes
push!(
typ.args[3].args,
Expr(:(=), Expr(:(::), nodetype, nodetype), Expr(:call, nodetype)),
)
"Read the Basin / profile table and return all A(S) and h(S) functions"
function create_storage_tables(
db::DB,
config::Config,
)::Tuple{Vector{Interpolation}, Vector{Interpolation}}
profiles = load_structvector(db, config, BasinProfileV1)
area = Interpolation[]
level = Interpolation[]
for group in IterTools.groupby(row -> row.node_id, profiles)
group_area = getproperty.(group, :area)
group_level = getproperty.(group, :level)
group_storage = profile_storage(group_level, group_area)
area_itp = LinearInterpolation(group_area, group_storage)
level_itp = LinearInterpolation(group_level, group_storage)
push!(area, area_itp)
push!(level, level_itp)
end
return esc(typ)
return area, level
end
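
# Usage sketch (assuming an opened model database `db` and a parsed `config`):
#     area, level = create_storage_tables(db, config)
#     area[i](S)   # A(S): surface area of the i-th basin at storage S
#     level[i](S)  # h(S): water level of the i-th basin at storage S
# The LinearInterpolation objects are callable, so each entry acts as a
# function of the storage.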

"""
9 changes: 5 additions & 4 deletions core/src/validation.jl
@@ -80,7 +80,6 @@ end

@version BasinProfileV1 begin
node_id::Int
storage::Float64
area::Float64
level::Float64
end
@@ -165,19 +164,21 @@ sort_by_id(row) = row.node_id
sort_by_id(row) = row.node_id
sort_by_time_id(row) = (row.time, row.node_id)
sort_by_id_level(row) = (row.node_id, row.level)
sort_by_id_storage(row) = (row.node_id, row.storage)

# get the right sort by function given the Schema, with sort_by_id as the default
sort_by_function(table::StructVector{<:Legolas.AbstractRecord}) = sort_by_id
sort_by_function(table::StructVector{TabulatedRatingCurveStaticV1}) = sort_by_id_level
sort_by_function(table::StructVector{BasinProfileV1}) = sort_by_id_storage

const TimeSchemas = Union{TabulatedRatingCurveTimeV1, BasinForcingV1}
const LevelLookupSchemas = Union{TabulatedRatingCurveStaticV1, BasinProfileV1}

function sort_by_function(table::StructVector{<:TimeSchemas})
return sort_by_time_id
end

function sort_by_function(table::StructVector{<:LevelLookupSchemas})
return sort_by_id_level
end
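
# Sketch of the resulting dispatch: with the storage column gone,
# BasinProfileV1 falls under LevelLookupSchemas, so for a Basin / profile
# table `profiles` we get
#     sort_by_function(profiles) === sort_by_id_level
# i.e. profiles are now sorted by (node_id, level) rather than
# (node_id, storage).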

"""
Depending on whether a table can be sorted, either sort it or assert that it is sorted.

6 changes: 3 additions & 3 deletions core/test/basin.jl
@@ -9,7 +9,7 @@ using SciMLBase
model = Ribasim.run(toml_path)
@test model isa Ribasim.Model
@test model.integrator.sol.retcode == Ribasim.ReturnCode.Success
@test model.integrator.sol.u[end] ≈ Float32[948.93713, 948.96857, 1.8637465, 1573.5077] skip =
@test model.integrator.sol.u[end] ≈ Float32[476.8749, 476.85898, 1.8706497, 786.96686] skip =
Sys.isapple()
end

@@ -20,7 +20,7 @@ end
@test model isa Ribasim.Model
@test model.integrator.sol.retcode == Ribasim.ReturnCode.Success
@test length(model.integrator.p.basin.precipitation) == 4
@test model.integrator.sol.u[end] ≈ Float32[915.72656, 915.701, 1.3522862, 1583.116] skip =
@test model.integrator.sol.u[end] ≈ Float32[480.5936, 480.58762, 1.4178488, 793.6097] skip =
Sys.isapple()
end

@@ -31,7 +31,7 @@ end
model = Ribasim.run(toml_path)
@test model isa Ribasim.Model
@test model.integrator.sol.retcode == Ribasim.ReturnCode.Success
@test model.integrator.sol.u[end] ≈ Float32[54.459435, 313.50992] skip = Sys.isapple()
@test model.integrator.sol.u[end] ≈ Float32[27.273159, 340.68964] skip = Sys.isapple()
# the highest level in the dynamic table is updated to 1.2 from the callback
@test model.integrator.p.tabulated_rating_curve.tables[end].t[end] == 1.2
end
7 changes: 7 additions & 0 deletions core/test/utils.jl
@@ -7,3 +7,10 @@ using Test
@test Ribasim.id_index(ids, 4) === (true, 2)
@test Ribasim.id_index(ids, 5) === (false, 0)
end

@testset "profile_storage" begin
@test Ribasim.profile_storage([0.0, 1.0], [0.0, 1000.0]) == [0.0, 500.0]
@test Ribasim.profile_storage([6.0, 7.0], [0.0, 1000.0]) == [0.0, 500.0]
@test Ribasim.profile_storage([6.0, 7.0, 9.0], [0.0, 1000.0, 1000.0]) ==
[0.0, 500.0, 2500.0]
end
2 changes: 1 addition & 1 deletion docs/contribute/addnode.qmd
@@ -180,7 +180,7 @@ Future: Test for the specific behaviour of each node type
There are several parts of the documentation which should be updated with the new node type:

- `docs/core/equations` should contain a short explanation and if possible an analytical expression for the behaviour of the new node;
- `docs/core/usage.qmd` should conntain a short explanation of the node and the possible schemas associated with it;
- `docs/core/usage.qmd` should contain a short explanation of the node and the possible schemas associated with it;
- The example models constructed in `docs/python/examples.ipynb` should be extended with the new node type or a new example model with the new node type should be made.

# Finishing up
22 changes: 12 additions & 10 deletions docs/core/usage.qmd
@@ -223,22 +223,24 @@ The profile table defines the physical dimensions of the storage reservoir of each
column | type | unit | restriction
--------- | ------- | ------------ | -----------
node_id | Int | - | sorted
storage | Float64 | $m^3$ | non-negative, per node_id: start at 0 and increasing
area | Float64 | $m^2$ | non-negative
level | Float64 | $m$ | -
area | Float64 | $m^2$ | non-negative, per node_id: start at 0 and increasing
level | Float64 | $m$ | per node_id: increasing

The level is the level at the basin outlet. All levels are defined in meters above a datum
that is the same for the entire model. An example of the first 4 rows of such a table is
given below. The first 3 rows define the profile of ID `2`. The number of rows can vary
per ID. Using a very large number of rows may impact performance.

node_id | storage | area | level
------- |--------------- |-------------- |-------
2 | 0.0 | 1.36404e5 | -0.105
2 | 24726.2 | 1.36404e5 | 0.095
2 | 49452.5 | 1.36404e5 | 0.295
2 | 2.49735e6 | 1.36404e5 | 20.095
3 | 0.0 | 50663.3 | 2.129
node_id | area | level
------- |------- |-------
2 | 0.0 | 6.0
2 | 1000.0 | 7.0
2 | 1000.0 | 9.0
3 | 0.0 | 2.2

We use the symbol $A$ for area, $h$ for level and $S$ for storage.
The profile provides a function $A(h)$ for each basin.
Internally these get converted to two functions, $A(S)$ and $h(S)$, by integrating $A$ over $h$, with the storage defined to be zero at the bottom of the profile.
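For instance, for the profile of ID `2` above, the trapezoidal rule gives $S = 0$ $m^3$ at the bottom ($h = 6$), $S = \frac{0 + 1000}{2} \cdot 1 = 500$ $m^3$ at $h = 7$, and $S = 500 + \frac{1000 + 1000}{2} \cdot 2 = 2500$ $m^3$ at $h = 9$.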

### TabulatedRatingCurve
