diff --git a/Manifest.toml b/Manifest.toml index 282ce9d77..1d93a18bc 100644 --- a/Manifest.toml +++ b/Manifest.toml @@ -1248,7 +1248,7 @@ version = "3.5.14" deps = ["Accessors", "Arrow", "BasicModelInterface", "CodecZstd", "ComponentArrays", "Configurations", "DBInterface", "DataInterpolations", "DataStructures", "Dates", "Dictionaries", "DiffEqCallbacks", "EnumX", "FiniteDiff", "ForwardDiff", "Graphs", "HiGHS", "IterTools", "JuMP", "Legolas", "LinearSolve", "Logging", "LoggingExtras", "MetaGraphsNext", "OrdinaryDiffEq", "PreallocationTools", "SQLite", "SciMLBase", "SparseArrays", "StructArrays", "Tables", "TerminalLoggers", "TimerOutputs", "TranscodingStreams"] path = "core" uuid = "aac5e3d9-0b8f-4d4f-8241-b1a7a9632635" -version = "2024.7.0" +version = "2024.8.0" [[deps.RuntimeGeneratedFunctions]] deps = ["ExprTools", "SHA", "Serialization"] diff --git a/build/cli/Cargo.lock b/build/cli/Cargo.lock index 0fe889c8f..f5edf5896 100644 --- a/build/cli/Cargo.lock +++ b/build/cli/Cargo.lock @@ -144,7 +144,7 @@ dependencies = [ [[package]] name = "ribasim" -version = "2024.7.0" +version = "2024.8.0" dependencies = [ "clap", "libc", diff --git a/build/cli/Cargo.toml b/build/cli/Cargo.toml index 82b19f5ba..5cfcd87fa 100644 --- a/build/cli/Cargo.toml +++ b/build/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ribasim" -version = "2024.7.0" +version = "2024.8.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/core/Project.toml b/core/Project.toml index 45441c4ed..6d575d960 100644 --- a/core/Project.toml +++ b/core/Project.toml @@ -2,7 +2,7 @@ name = "Ribasim" uuid = "aac5e3d9-0b8f-4d4f-8241-b1a7a9632635" authors = ["Deltares and contributors "] manifest = "../Manifest.toml" -version = "2024.7.0" +version = "2024.8.0" [deps] Accessors = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697" @@ -51,7 +51,7 @@ ComponentArrays = "0.13,0.14,0.15" Configurations = "0.17" DBInterface = "2.4" DataFrames = "1.4" -DataInterpolations = "4.4, 5" +DataInterpolations = "4.4" DataStructures = "0.18" Dates = "<0.0.1,1" Dictionaries = "0.3.25, 0.4" diff --git a/core/src/callback.jl b/core/src/callback.jl index 74ae2b345..908e21b03 100644 --- a/core/src/callback.jl +++ b/core/src/callback.jl @@ -125,9 +125,7 @@ function integrate_flows!(u, t, integrator)::Nothing else # Horizontal flows value[] += - 0.5 * - (get_flow(graph, edge..., 0) + get_flow(graph, edge..., 0; prev = true)) * - dt + 0.5 * (get_flow(graph, edge..., 0) + get_flow_prev(graph, edge..., 0)) * dt end end diff --git a/core/src/graph.jl b/core/src/graph.jl index edcd8f3cd..8351dd92d 100644 --- a/core/src/graph.jl +++ b/core/src/graph.jl @@ -18,10 +18,11 @@ function create_graph(db::DB, config::Config, chunk_sizes::Vector{Int})::MetaGra node_ids = Dict{Int32, Set{NodeID}}() # Source edges per subnetwork edges_source = Dict{Int32, Set{EdgeMetadata}}() - # The number of flow edges + # The flow counter gives a unique consecutive id to the + # flow edges to index the flow vectors flow_counter = 0 # Dictionary from flow edge to index in flow vector - flow_dict = Dict{Tuple{NodeID, NodeID}, Int}() + flow_dict = Dict{Tuple{NodeID, NodeID}, Int32}() graph = MetaGraph( DiGraph(); label_type = NodeID, @@ -65,7 +66,15 @@ function create_graph(db::DB, config::Config, chunk_sizes::Vector{Int})::MetaGra if ismissing(subnetwork_id) subnetwork_id = 0 end - edge_metadata = EdgeMetadata(fid, edge_type, subnetwork_id, (id_src, id_dst)) + edge_metadata = EdgeMetadata( + fid, + edge_type == EdgeType.flow ? 
flow_counter + 1 : 0, + edge_type, + subnetwork_id, + (id_src, id_dst), + -1, + -1, + ) if haskey(graph, id_src, id_dst) errors = true @error "Duplicate edge" id_src id_dst @@ -169,24 +178,55 @@ end Set the given flow q over the edge between the given nodes. """ function set_flow!(graph::MetaGraph, id_src::NodeID, id_dst::NodeID, q::Number)::Nothing - (; flow_dict, flow) = graph[] - get_tmp(flow, q)[flow_dict[(id_src, id_dst)]] = q + (; flow_dict) = graph[] + flow_idx = flow_dict[(id_src, id_dst)] + set_flow!(graph, flow_idx, q) + return nothing +end + +function set_flow!(graph::MetaGraph, edge_metadata::EdgeMetadata, q::Number)::Nothing + set_flow!(graph, edge_metadata.flow_idx, q) + return nothing +end + +function set_flow!(graph, flow_idx::Int32, q::Number)::Nothing + (; flow) = graph[] + get_tmp(flow, q)[flow_idx] = q return nothing end """ Get the flow over the given edge (val is needed for get_tmp from ForwardDiff.jl). """ -function get_flow( - graph::MetaGraph, - id_src::NodeID, - id_dst::NodeID, - val; - prev::Bool = false, -)::Number - (; flow_dict, flow, flow_prev) = graph[] - flow_vector = prev ? flow_prev : flow - return get_tmp(flow_vector, val)[flow_dict[id_src, id_dst]] +function get_flow(graph::MetaGraph, id_src::NodeID, id_dst::NodeID, val)::Number + (; flow_dict) = graph[] + flow_idx = flow_dict[id_src, id_dst] + return get_flow(graph, flow_idx, val) +end + +function get_flow(graph, edge_metadata::EdgeMetadata, val)::Number + return get_flow(graph, edge_metadata.flow_idx, val) +end + +function get_flow(graph::MetaGraph, flow_idx::Integer, val) + return get_tmp(graph[].flow, val)[flow_idx] +end + +function get_flow_prev(graph, id_src::NodeID, id_dst::NodeID, val)::Number + # Note: Can be removed after https://github.com/Deltares/Ribasim/pull/1444 + (; flow_dict) = graph[] + flow_idx = flow_dict[id_src, id_dst] + return get_flow(graph, flow_idx, val) +end + +function get_flow_prev(graph, edge_metadata::EdgeMetadata, val)::Number + # Note: Can be removed after https://github.com/Deltares/Ribasim/pull/1444 + return get_flow_prev(graph, edge_metadata.flow_idx, val) +end + +function get_flow_prev(graph::MetaGraph, flow_idx::Integer, val) + # Note: Can be removed after https://github.com/Deltares/Ribasim/pull/1444 + return get_tmp(graph[].flow_prev, val)[flow_idx] end """ diff --git a/core/src/parameter.jl b/core/src/parameter.jl index 5a3672b17..ccb017ca2 100644 --- a/core/src/parameter.jl +++ b/core/src/parameter.jl @@ -116,16 +116,22 @@ end """ Type for storing metadata of edges in the graph: id: ID of the edge (only used for labeling flow output) +flow_idx: Index in the vector of flows type: type of the edge subnetwork_id_source: ID of subnetwork where this edge is a source (0 if not a source) edge: (from node ID, to node ID) +basin_idx_src: Basin index of source node (0 if not a basin) +basin_idx_dst: Basin index of destination node (0 if not a basin) """ struct EdgeMetadata id::Int32 + flow_idx::Int32 type::EdgeType.T subnetwork_id_source::Int32 edge::Tuple{NodeID, NodeID} + basin_idx_src::Int32 + basin_idx_dst::Int32 end abstract type AbstractParameterNode end @@ -233,8 +239,10 @@ Type parameter C indicates the content backing the StructVector, which can be a of Vectors or Arrow Primitives, and is added to avoid type instabilities. 
node_id: node ID of the TabulatedRatingCurve node -inflow_id: node ID across the incoming flow edge -outflow_ids: node IDs across the outgoing flow edges +inflow_edge: incoming flow edge metadata + The ID of the destination node is always the ID of the TabulatedRatingCurve node +outflow_edges: outgoing flow edges metadata + The ID of the source node is always the ID of the TabulatedRatingCurve node active: whether this node is active and thus contributes flows tables: The current Q(h) relationships time: The time table used for updating the tables @@ -242,8 +250,8 @@ control_mapping: dictionary from (node_id, control_state) to Q(h) and/or active """ struct TabulatedRatingCurve{C} <: AbstractParameterNode node_id::Vector{NodeID} - inflow_id::Vector{NodeID} - outflow_ids::Vector{Vector{NodeID}} + inflow_edge::Vector{EdgeMetadata} + outflow_edges::Vector{Vector{EdgeMetadata}} active::BitVector tables::Vector{ScalarInterpolation} time::StructVector{TabulatedRatingCurveTimeV1, C, Int} @@ -252,8 +260,10 @@ end """ node_id: node ID of the LinearResistance node -inflow_id: node ID across the incoming flow edge -outflow_id: node ID across the outgoing flow edge +inflow_edge: incoming flow edge metadata + The ID of the destination node is always the ID of the LinearResistance node +outflow_edge: outgoing flow edge metadata + The ID of the source node is always the ID of the LinearResistance node active: whether this node is active and thus contributes flows resistance: the resistance to flow; `Q_unlimited = Δh/resistance` max_flow_rate: the maximum flow rate allowed through the node; `Q = clamp(Q_unlimited, -max_flow_rate, max_flow_rate)` @@ -261,8 +271,8 @@ control_mapping: dictionary from (node_id, control_state) to resistance and/or a """ struct LinearResistance <: AbstractParameterNode node_id::Vector{NodeID} - inflow_id::Vector{NodeID} - outflow_id::Vector{NodeID} + inflow_edge::Vector{EdgeMetadata} + outflow_edge::Vector{EdgeMetadata} active::BitVector resistance::Vector{Float64} max_flow_rate::Vector{Float64} @@ -273,8 +283,10 @@ end This is a simple Manning-Gauckler reach connection. node_id: node ID of the ManningResistance node -inflow_id: node ID across the incoming flow edge -outflow_id: node ID across the outgoing flow edge +inflow_edge: incoming flow edge metadata + The ID of the destination node is always the ID of the ManningResistance node +outflow_edge: outgoing flow edge metadata + The ID of the source node is always the ID of the ManningResistance node length: reach length manning_n: roughness; Manning's n in (SI units). @@ -307,33 +319,31 @@ Requirements: """ struct ManningResistance <: AbstractParameterNode node_id::Vector{NodeID} - inflow_id::Vector{NodeID} - outflow_id::Vector{NodeID} + inflow_edge::Vector{EdgeMetadata} + outflow_edge::Vector{EdgeMetadata} active::BitVector length::Vector{Float64} manning_n::Vector{Float64} profile_width::Vector{Float64} profile_slope::Vector{Float64} + upstream_bottom::Vector{Float64} + downstream_bottom::Vector{Float64} control_mapping::Dict{Tuple{NodeID, String}, NamedTuple} end """ -Requirements: - -* from: must be (TabulatedRatingCurve,) node -* to: must be (Basin,) node -* fraction must be positive. 
- -node_id: node ID of the TabulatedRatingCurve node -inflow_id: node ID across the incoming flow edge -outflow_id: node ID across the outgoing flow edge +node_id: node ID of the FractionalFlow node +inflow_edge: incoming flow edge metadata + The ID of the destination node is always the ID of the FractionalFlow node +outflow_edge: outgoing flow edge metadata + The ID of the source node is always the ID of the FractionalFlow node fraction: The fraction in [0,1] of flow the node lets through control_mapping: dictionary from (node_id, control_state) to fraction """ struct FractionalFlow <: AbstractParameterNode node_id::Vector{NodeID} - inflow_id::Vector{NodeID} - outflow_id::Vector{NodeID} + inflow_edge::Vector{EdgeMetadata} + outflow_edge::Vector{EdgeMetadata} fraction::Vector{Float64} control_mapping::Dict{Tuple{NodeID, String}, NamedTuple} end @@ -362,8 +372,10 @@ end """ node_id: node ID of the Pump node -inflow_id: node ID across the incoming flow edge -outflow_ids: node IDs across the outgoing flow edges +inflow_edge: incoming flow edge metadata + The ID of the destination node is always the ID of the Pump node +outflow_edges: outgoing flow edges metadata + The ID of the source node is always the ID of the Pump node active: whether this node is active and thus contributes flow flow_rate: target flow rate min_flow_rate: The minimal flow rate of the pump @@ -373,8 +385,8 @@ is_pid_controlled: whether the flow rate of this pump is governed by PID control """ struct Pump{T} <: AbstractParameterNode node_id::Vector{NodeID} - inflow_id::Vector{NodeID} - outflow_ids::Vector{Vector{NodeID}} + inflow_edge::Vector{EdgeMetadata} + outflow_edges::Vector{Vector{EdgeMetadata}} active::BitVector flow_rate::T min_flow_rate::Vector{Float64} @@ -384,8 +396,8 @@ struct Pump{T} <: AbstractParameterNode function Pump( node_id, - inflow_id, - outflow_ids, + inflow_edge, + outflow_edges, active, flow_rate::T, min_flow_rate, @@ -396,8 +408,8 @@ struct Pump{T} <: AbstractParameterNode if valid_flow_rates(node_id, get_tmp(flow_rate, 0), control_mapping) return new{T}( node_id, - inflow_id, - outflow_ids, + inflow_edge, + outflow_edges, active, flow_rate, min_flow_rate, @@ -413,8 +425,10 @@ end """ node_id: node ID of the Outlet node -inflow_id: node ID across the incoming flow edge -outflow_ids: node IDs across the outgoing flow edges +inflow_edge: incoming flow edge metadata. + The ID of the destination node is always the ID of the Outlet node +outflow_edges: outgoing flow edges metadata. 
+ The ID of the source node is always the ID of the Outlet node active: whether this node is active and thus contributes flow flow_rate: target flow rate min_flow_rate: The minimal flow rate of the outlet @@ -424,8 +438,8 @@ is_pid_controlled: whether the flow rate of this outlet is governed by PID contr """ struct Outlet{T} <: AbstractParameterNode node_id::Vector{NodeID} - inflow_id::Vector{NodeID} - outflow_ids::Vector{Vector{NodeID}} + inflow_edge::Vector{EdgeMetadata} + outflow_edges::Vector{Vector{EdgeMetadata}} active::BitVector flow_rate::T min_flow_rate::Vector{Float64} @@ -528,8 +542,10 @@ end """ node_id: node ID of the UserDemand node -inflow_id: node ID across the incoming flow edge -outflow_id: node ID across the outgoing flow edge +inflow_edge: incoming flow edge + The ID of the destination node is always the ID of the UserDemand node +outflow_edge: outgoing flow edge metadata + The ID of the source node is always the ID of the UserDemand node active: whether this node is active and thus demands water realized_bmi: Cumulative inflow volume, for read or reset by BMI only demand: water flux demand of UserDemand per priority over time @@ -545,8 +561,8 @@ min_level: The level of the source basin below which the UserDemand does not abs """ struct UserDemand <: AbstractParameterNode node_id::Vector{NodeID} - inflow_id::Vector{NodeID} - outflow_id::Vector{NodeID} + inflow_edge::Vector{EdgeMetadata} + outflow_edge::Vector{EdgeMetadata} active::BitVector realized_bmi::Vector{Float64} demand::Matrix{Float64} @@ -632,7 +648,7 @@ struct Parameters{T, C1, C2, V1, V2, V3} @NamedTuple{ node_ids::Dict{Int32, Set{NodeID}}, edges_source::Dict{Int32, Set{EdgeMetadata}}, - flow_dict::Dict{Tuple{NodeID, NodeID}, Int}, + flow_dict::Dict{Tuple{NodeID, NodeID}, Int32}, flow::T, flow_prev::Vector{Float64}, flow_integrated::Vector{Float64}, diff --git a/core/src/read.jl b/core/src/read.jl index 523914a41..2c7b2df43 100644 --- a/core/src/read.jl +++ b/core/src/read.jl @@ -246,8 +246,8 @@ function LinearResistance(db::DB, config::Config, graph::MetaGraph)::LinearResis return LinearResistance( node_id, - inflow_id.(Ref(graph), node_id), - outflow_id.(Ref(graph), node_id), + inflow_edge.(Ref(graph), node_id), + outflow_edge.(Ref(graph), node_id), BitVector(parsed_parameters.active), parsed_parameters.resistance, parsed_parameters.max_flow_rate, @@ -328,8 +328,8 @@ function TabulatedRatingCurve( return TabulatedRatingCurve( node_ids, - inflow_id.(Ref(graph), node_ids), - [collect(outflow_ids(graph, id)) for id in node_ids], + inflow_edge.(Ref(graph), node_ids), + outflow_edges.(Ref(graph), node_ids), active, interpolations, time, @@ -337,7 +337,12 @@ function TabulatedRatingCurve( ) end -function ManningResistance(db::DB, config::Config, graph::MetaGraph)::ManningResistance +function ManningResistance( + db::DB, + config::Config, + graph::MetaGraph, + basin::Basin, +)::ManningResistance static = load_structvector(db, config, ManningResistanceStaticV1) parsed_parameters, valid = parse_static_and_time(db, config, "ManningResistance"; static) @@ -347,16 +352,20 @@ function ManningResistance(db::DB, config::Config, graph::MetaGraph)::ManningRes end node_id = NodeID.(NodeType.ManningResistance, parsed_parameters.node_id) + upstream_bottom = basin_bottom.(Ref(basin), inflow_id.(Ref(graph), node_id)) + downstream_bottom = basin_bottom.(Ref(basin), outflow_id.(Ref(graph), node_id)) return ManningResistance( node_id, - inflow_id.(Ref(graph), node_id), - outflow_id.(Ref(graph), node_id), + inflow_edge.(Ref(graph), 
node_id), + outflow_edge.(Ref(graph), node_id), BitVector(parsed_parameters.active), parsed_parameters.length, parsed_parameters.manning_n, parsed_parameters.profile_width, parsed_parameters.profile_slope, + [bottom[2] for bottom in upstream_bottom], + [bottom[2] for bottom in downstream_bottom], parsed_parameters.control_mapping, ) end @@ -373,8 +382,8 @@ function FractionalFlow(db::DB, config::Config, graph::MetaGraph)::FractionalFlo return FractionalFlow( node_id, - inflow_id.(Ref(graph), node_id), - outflow_id.(Ref(graph), node_id), + inflow_edge.(Ref(graph), node_id), + outflow_edge.(Ref(graph), node_id), parsed_parameters.fraction, parsed_parameters.control_mapping, ) @@ -464,8 +473,8 @@ function Pump(db::DB, config::Config, graph::MetaGraph, chunk_sizes::Vector{Int} return Pump( node_id, - inflow_id.(Ref(graph), node_id), - [collect(outflow_ids(graph, id)) for id in node_id], + inflow_edge.(Ref(graph), node_id), + outflow_edges.(Ref(graph), node_id), BitVector(parsed_parameters.active), flow_rate, parsed_parameters.min_flow_rate, @@ -497,8 +506,8 @@ function Outlet(db::DB, config::Config, graph::MetaGraph, chunk_sizes::Vector{In return Outlet( node_id, - inflow_id.(Ref(graph), node_id), - [collect(outflow_ids(graph, id)) for id in node_id], + inflow_edge.(Ref(graph), node_id), + outflow_edges.(Ref(graph), node_id), BitVector(parsed_parameters.active), flow_rate, parsed_parameters.min_flow_rate, @@ -889,8 +898,8 @@ function UserDemand(db::DB, config::Config, graph::MetaGraph)::UserDemand return UserDemand( node_ids, - inflow_id.(Ref(graph), node_ids), - outflow_id.(Ref(graph), node_ids), + inflow_edge.(Ref(graph), node_ids), + outflow_edge.(Ref(graph), node_ids), active, realized_bmi, demand, @@ -1071,8 +1080,11 @@ function Parameters(db::DB, config::Config)::Parameters error("Invalid number of connections for certain node types.") end + basin = Basin(db, config, graph, chunk_sizes) + set_basin_idxs!(graph, basin) + linear_resistance = LinearResistance(db, config, graph) - manning_resistance = ManningResistance(db, config, graph) + manning_resistance = ManningResistance(db, config, graph, basin) tabulated_rating_curve = TabulatedRatingCurve(db, config, graph) fractional_flow = FractionalFlow(db, config, graph) level_boundary = LevelBoundary(db, config) @@ -1086,7 +1098,6 @@ function Parameters(db::DB, config::Config)::Parameters level_demand = LevelDemand(db, config) flow_demand = FlowDemand(db, config) - basin = Basin(db, config, graph, chunk_sizes) subgrid_level = Subgrid(db, config, basin) p = Parameters( diff --git a/core/src/solve.jl b/core/src/solve.jl index 6d0b2538b..a0334bee0 100644 --- a/core/src/solve.jl +++ b/core/src/solve.jl @@ -25,7 +25,7 @@ function water_balance!( formulate_flows!(p, storage, t) # Now formulate du - formulate_du!(du, graph, basin, storage) + formulate_du!(du, graph, storage) # PID control (changes the du of PID controlled basins) continuous_control!(u, du, pid_control, p, integral, t) @@ -40,7 +40,10 @@ function set_current_basin_properties!(basin::Basin, storage::AbstractVector)::N for i in eachindex(storage) s = storage[i] - current_area[i], current_level[i] = get_area_and_level(basin, i, s) + area, level = get_area_and_level(basin, i, s) + + current_area[i] = area + current_level[i] = level end end @@ -148,8 +151,11 @@ function continuous_control!( src_id = inflow_id(graph, controlled_node_id) dst_id = outflow_id(graph, controlled_node_id) - has_src_level, src_level = get_level(p, src_id, t; storage) - has_dst_level, dst_level = get_level(p, 
dst_id, t; storage) + inflow_edge = graph[src_id, controlled_node_id] + outflow_edge = graph[controlled_node_id, dst_id] + + has_src_level, src_level = get_level(p, inflow_edge, src_id, t; storage) + has_dst_level, dst_level = get_level(p, outflow_edge, dst_id, t; storage) factor_outlet = 1.0 @@ -173,7 +179,8 @@ function continuous_control!( end id_inflow = inflow_id(graph, controlled_node_id) - factor_basin = low_storage_factor(storage, basin.node_id, id_inflow, 10.0) + inflow_edge = graph[id_inflow, controlled_node_id] + factor_basin = low_storage_factor(storage, inflow_edge, id_inflow, 10.0) factor = factor_basin * factor_outlet flow_rate = 0.0 @@ -267,12 +274,12 @@ function formulate_flow!( storage::AbstractVector, t::Number, )::Nothing - (; graph, basin, allocation) = p + (; graph, allocation) = p for ( node_id, - inflow_id, - outflow_id, + inflow_edge, + outflow_edge, active, demand_itp, demand, @@ -282,8 +289,8 @@ function formulate_flow!( demand_from_timeseries, ) in zip( user_demand.node_id, - user_demand.inflow_id, - user_demand.outflow_id, + user_demand.inflow_edge, + user_demand.outflow_edge, user_demand.active, user_demand.demand_itp, # TODO permute these so the nodes are the last dimension, for performance @@ -315,20 +322,21 @@ function formulate_flow!( end # Smoothly let abstraction go to 0 as the source basin dries out - factor_basin = low_storage_factor(storage, basin.node_id, inflow_id, 10.0) + inflow_id = inflow_edge.edge[1] + factor_basin = low_storage_factor(storage, inflow_edge, inflow_id, 10.0) q *= factor_basin # Smoothly let abstraction go to 0 as the source basin # level reaches its minimum level - _, source_level = get_level(p, inflow_id, t; storage) + _, source_level = get_level(p, inflow_edge, inflow_id, t; storage) Δsource_level = source_level - min_level factor_level = reduction_factor(Δsource_level, 0.1) q *= factor_level - set_flow!(graph, inflow_id, node_id, q) + set_flow!(graph, inflow_edge, q) # Return flow is immediate - set_flow!(graph, node_id, outflow_id, q * return_factor) + set_flow!(graph, outflow_edge, q * return_factor) end return nothing end @@ -345,24 +353,27 @@ function formulate_flow!( (; graph) = p (; node_id, active, resistance, max_flow_rate) = linear_resistance for (i, id) in enumerate(node_id) - inflow_id = linear_resistance.inflow_id[i] - outflow_id = linear_resistance.outflow_id[i] + inflow_edge = linear_resistance.inflow_edge[i] + outflow_edge = linear_resistance.outflow_edge[i] + + inflow_id = inflow_edge.edge[1] + outflow_id = outflow_edge.edge[2] if active[i] - _, h_a = get_level(p, inflow_id, t; storage) - _, h_b = get_level(p, outflow_id, t; storage) + _, h_a = get_level(p, inflow_edge, inflow_id, t; storage) + _, h_b = get_level(p, outflow_edge, outflow_id, t; storage) q_unlimited = (h_a - h_b) / resistance[i] q = clamp(q_unlimited, -max_flow_rate[i], max_flow_rate[i]) # add reduction_factor on highest level if q > 0 - q *= low_storage_factor(storage, p.basin.node_id, inflow_id, 10.0) + q *= low_storage_factor(storage, inflow_edge, inflow_id, 10.0) else - q *= low_storage_factor(storage, p.basin.node_id, outflow_id, 10.0) + q *= low_storage_factor(storage, outflow_edge, outflow_id, 10.0) end - set_flow!(graph, inflow_id, id, q) - set_flow!(graph, id, outflow_id, q) + set_flow!(graph, inflow_edge, q) + set_flow!(graph, outflow_edge, q) end end return nothing @@ -377,23 +388,26 @@ function formulate_flow!( storage::AbstractVector, t::Number, )::Nothing - (; basin, graph) = p - (; node_id, active, tables, inflow_id, outflow_ids) = 
tabulated_rating_curve + (; graph) = p + (; node_id, active, tables, inflow_edge, outflow_edges) = tabulated_rating_curve for (i, id) in enumerate(node_id) - upstream_basin_id = inflow_id[i] - downstream_ids = outflow_ids[i] + upstream_edge = inflow_edge[i] + downstream_edges = outflow_edges[i] + upstream_basin_id = upstream_edge.edge[1] if active[i] - factor = low_storage_factor(storage, basin.node_id, upstream_basin_id, 10.0) - q = factor * tables[i](get_level(p, upstream_basin_id, t; storage)[2]) + factor = low_storage_factor(storage, upstream_edge, upstream_basin_id, 10.0) + q = + factor * + tables[i](get_level(p, upstream_edge, upstream_basin_id, t; storage)[2]) else q = 0.0 end - set_flow!(graph, upstream_basin_id, id, q) - for downstream_id in downstream_ids - set_flow!(graph, id, downstream_id, q) + set_flow!(graph, upstream_edge, q) + for downstream_edge in downstream_edges + set_flow!(graph, downstream_edge, q) end end return nothing @@ -441,24 +455,35 @@ dry. function formulate_flow!( manning_resistance::ManningResistance, p::Parameters, - storage::AbstractVector, + storage::AbstractVector{T}, t::Number, -)::Nothing - (; basin, graph) = p - (; node_id, active, length, manning_n, profile_width, profile_slope) = - manning_resistance +)::Nothing where {T} + (; graph) = p + (; + node_id, + active, + length, + manning_n, + profile_width, + profile_slope, + upstream_bottom, + downstream_bottom, + ) = manning_resistance for (i, id) in enumerate(node_id) - inflow_id = manning_resistance.inflow_id[i] - outflow_id = manning_resistance.outflow_id[i] + inflow_edge = manning_resistance.inflow_edge[i] + outflow_edge = manning_resistance.outflow_edge[i] + + inflow_id = inflow_edge.edge[1] + outflow_id = outflow_edge.edge[2] if !active[i] continue end - _, h_a = get_level(p, inflow_id, t; storage) - _, h_b = get_level(p, outflow_id, t; storage) - _, bottom_a = basin_bottom(basin, inflow_id) - _, bottom_b = basin_bottom(basin, outflow_id) + _, h_a = get_level(p, inflow_edge, inflow_id, t; storage) + _, h_b = get_level(p, outflow_edge, outflow_id, t; storage) + bottom_a = upstream_bottom[i] + bottom_b = downstream_bottom[i] slope = profile_slope[i] width = profile_width[i] n = manning_n[i] @@ -481,15 +506,15 @@ function formulate_flow!( P_b = width + 2.0 * d_b * slope_unit_length R_h_a = A_a / P_a R_h_b = A_b / P_b - R_h = 0.5 * (R_h_a + R_h_b) + R_h::T = 0.5 * (R_h_a + R_h_b) k = 1000.0 # This epsilon makes sure the AD derivative at Δh = 0 does not give NaN eps = 1e-200 - q = q_sign * A / n * R_h^(2 / 3) * sqrt(Δh / L * 2 / π * atan(k * Δh) + eps) + q = q_sign * A / n * ∛(R_h^2) * sqrt(Δh / L * 2 / π * atan(k * Δh) + eps) - set_flow!(graph, inflow_id, id, q) - set_flow!(graph, id, outflow_id, q) + set_flow!(graph, inflow_edge, q) + set_flow!(graph, outflow_edge, q) end return nothing end @@ -502,16 +527,16 @@ function formulate_flow!( )::Nothing (; graph) = p - for (node_id, inflow_id, outflow_id, fraction) in zip( + for (node_id, inflow_edge, outflow_edge, fraction) in zip( fractional_flow.node_id, - fractional_flow.inflow_id, - fractional_flow.outflow_id, + fractional_flow.inflow_edge, + fractional_flow.outflow_edge, fractional_flow.fraction, ) # overwrite the inflow such that flow is conserved over the FractionalFlow - outflow = get_flow(graph, inflow_id, node_id, storage) * fraction - set_flow!(graph, inflow_id, node_id, outflow) - set_flow!(graph, node_id, outflow_id, outflow) + outflow = get_flow(graph, inflow_edge, storage) * fraction + set_flow!(graph, inflow_edge, outflow) + 
set_flow!(graph, outflow_edge, outflow) end return nothing end @@ -548,10 +573,10 @@ function formulate_flow!( )::Nothing (; graph, basin) = p - for (node_id, inflow_id, outflow_ids, active, flow_rate, is_pid_controlled) in zip( + for (node_id, inflow_edge, outflow_edges, active, flow_rate, is_pid_controlled) in zip( pump.node_id, - pump.inflow_id, - pump.outflow_ids, + pump.inflow_edge, + pump.outflow_edges, pump.active, get_tmp(pump.flow_rate, storage), pump.is_pid_controlled, @@ -560,13 +585,14 @@ function formulate_flow!( continue end - factor = low_storage_factor(storage, basin.node_id, inflow_id, 10.0) + inflow_id = inflow_edge.edge[1] + factor = low_storage_factor(storage, inflow_edge, inflow_id, 10.0) q = flow_rate * factor - set_flow!(graph, inflow_id, node_id, q) + set_flow!(graph, inflow_edge, q) - for outflow_id in outflow_ids - set_flow!(graph, node_id, outflow_id, q) + for outflow_edge in outflow_edges + set_flow!(graph, outflow_edge, q) end end return nothing @@ -578,20 +604,20 @@ function formulate_flow!( storage::AbstractVector, t::Number, )::Nothing - (; graph, basin) = p + (; graph) = p for ( node_id, - inflow_id, - outflow_ids, + inflow_edge, + outflow_edges, active, flow_rate, is_pid_controlled, min_crest_level, ) in zip( outlet.node_id, - outlet.inflow_id, - outlet.outflow_ids, + outlet.inflow_edge, + outlet.outflow_edges, outlet.active, get_tmp(outlet.flow_rate, storage), outlet.is_pid_controlled, @@ -601,14 +627,16 @@ function formulate_flow!( continue end + inflow_id = inflow_edge.edge[1] q = flow_rate - q *= low_storage_factor(storage, basin.node_id, inflow_id, 10.0) + q *= low_storage_factor(storage, inflow_edge, inflow_id, 10.0) # No flow of outlet if source level is lower than target level # TODO support multiple outflows to FractionalFlow, or refactor FractionalFlow - outflow_id = only(outflow_ids) - _, src_level = get_level(p, inflow_id, t; storage) - _, dst_level = get_level(p, outflow_id, t; storage) + outflow_edge = only(outflow_edges) + outflow_id = outflow_edge.edge[2] + _, src_level = get_level(p, inflow_edge, inflow_id, t; storage) + _, dst_level = get_level(p, outflow_edge, outflow_id, t; storage) if src_level !== nothing && dst_level !== nothing Δlevel = src_level - dst_level @@ -620,10 +648,10 @@ function formulate_flow!( q *= reduction_factor(src_level - min_crest_level, 0.1) end - set_flow!(graph, inflow_id, node_id, q) + set_flow!(graph, inflow_edge, q) - for outflow_id in outflow_ids - set_flow!(graph, node_id, outflow_id, q) + for outflow_edge in outflow_edges + set_flow!(graph, outflow_edge, q) end end return nothing @@ -632,18 +660,24 @@ end function formulate_du!( du::ComponentVector, graph::MetaGraph, - basin::Basin, storage::AbstractVector, )::Nothing # loop over basins # subtract all outgoing flows # add all ingoing flows - for (i, basin_id) in enumerate(basin.node_id) - for inflow_id in basin.inflow_ids[i] - du[i] += get_flow(graph, inflow_id, basin_id, storage) + for edge_metadata in values(graph.edge_data) + (; type, edge, basin_idx_src, basin_idx_dst) = edge_metadata + if type !== EdgeType.flow + continue end - for outflow_id in basin.outflow_ids[i] - du[i] -= get_flow(graph, basin_id, outflow_id, storage) + from_id, to_id = edge + + if from_id.type == NodeType.Basin + q = get_flow(graph, edge_metadata, storage) + du[basin_idx_src] -= q + elseif to_id.type == NodeType.Basin + q = get_flow(graph, edge_metadata, storage) + du[basin_idx_dst] += q end end return nothing diff --git a/core/src/util.jl b/core/src/util.jl index 
d03a65b4f..4cc6d0f84 100644 --- a/core/src/util.jl +++ b/core/src/util.jl @@ -89,11 +89,7 @@ end Compute the area and level of a basin given its storage. Also returns darea/dlevel as it is needed for the Jacobian. """ -function get_area_and_level( - basin::Basin, - state_idx::Int, - storage::Number, -)::Tuple{Real, Real} +function get_area_and_level(basin::Basin, state_idx::Int, storage::T)::Tuple{T, T} where {T} storage_discrete = basin.storage[state_idx] area_discrete = basin.area[state_idx] level_discrete = basin.level[state_idx] @@ -102,11 +98,16 @@ function get_area_and_level( end function get_area_and_level( - storage_discrete::Vector, - area_discrete::Vector, - level_discrete::Vector, - storage::Number, -)::Tuple{Number, Number} + storage_discrete::AbstractVector, + area_discrete::AbstractVector, + level_discrete::AbstractVector, + storage::T, +)::Tuple{T, T} where {T} + + # Set type of area and level to prevent runtime dispatch + area::T = zero(T) + level::T = zero(T) + # storage_idx: smallest index such that storage_discrete[storage_idx] >= storage storage_idx = searchsortedfirst(storage_discrete, storage) @@ -115,13 +116,6 @@ function get_area_and_level( level = level_discrete[1] area = area_discrete[1] - level_lower = level - level_higher = level_discrete[2] - area_lower = area - area_higher = area_discrete[2] - - darea = (area_higher - area_lower) / (level_higher - level_lower) - elseif storage_idx == length(storage_discrete) + 1 # With a storage above the profile, use a linear extrapolation of area(level) # based on the last 2 values. @@ -132,20 +126,18 @@ function get_area_and_level( storage_lower = storage_discrete[end - 1] storage_higher = storage_discrete[end] - area_diff = area_higher - area_lower - level_diff = level_higher - level_lower + Δarea = area_higher - area_lower + Δlevel = level_higher - level_lower + Δstorage = storage_higher - storage_lower - if area_diff ≈ 0 + if Δarea ≈ 0.0 # Constant area means linear interpolation of level - darea = 0.0 area = area_lower - level = - level_higher + - level_diff * (storage - storage_higher) / (storage_higher - storage_lower) + level = level_higher + Δlevel * (storage - storage_higher) / Δstorage else - darea = area_diff / level_diff + darea = Δarea / Δlevel area = sqrt(area_higher^2 + 2 * (storage - storage_higher) * darea) - level = level_lower + level_diff * (area - area_lower) / area_diff + level = level_lower + Δlevel * (area - area_lower) / Δarea end else @@ -156,21 +148,19 @@ function get_area_and_level( storage_lower = storage_discrete[storage_idx - 1] storage_higher = storage_discrete[storage_idx] - area_diff = area_higher - area_lower - level_diff = level_higher - level_lower + Δarea = area_higher - area_lower + Δlevel = level_higher - level_lower + Δstorage = storage_higher - storage_lower - if area_diff ≈ 0 + if Δarea ≈ 0.0 # Constant area means linear interpolation of level - darea = 0.0 area = area_lower - level = - level_lower + - level_diff * (storage - storage_lower) / (storage_higher - storage_lower) + level = level_lower + Δlevel * (storage - storage_lower) / Δstorage else - darea = area_diff / level_diff + darea = Δarea / Δlevel area = sqrt(area_lower^2 + 2 * (storage - storage_lower) * darea) - level = level_lower + level_diff * (area - area_lower) / area_diff + level = level_lower + Δlevel * (area - area_lower) / Δarea end end @@ -373,13 +363,16 @@ storage: tells ForwardDiff whether this call is for differentiation or not """ function get_level( p::Parameters, + edge_metadata::EdgeMetadata, 
node_id::NodeID, t::Number; storage::Union{AbstractArray, Number} = 0, )::Tuple{Bool, Number} (; basin, level_boundary) = p if node_id.type == NodeType.Basin - _, i = id_index(basin.node_id, node_id) + # The edge metadata is only used to obtain the Basin index + # in case node_id is for a Basin + i = get_basin_idx(edge_metadata, node_id) current_level = get_tmp(basin.current_level, storage) return true, current_level[i] elseif node_id.type == NodeType.LevelBoundary @@ -549,13 +542,13 @@ end "If id is a Basin with storage below the threshold, return a reduction factor != 1" function low_storage_factor( storage::AbstractVector{T}, - basin_ids::Indices{NodeID}, + edge_metadata::EdgeMetadata, id::NodeID, threshold::Real, )::T where {T <: Real} - hasindex, basin_idx = id_index(basin_ids, id) - return if hasindex - reduction_factor(storage[basin_idx], threshold) + if id.type == NodeType.Basin + i = get_basin_idx(edge_metadata, id) + reduction_factor(storage[i], threshold) else one(T) end @@ -734,3 +727,31 @@ has_fractional_flow_outneighbors(graph::MetaGraph, node_id::NodeID)::Bool = any( outneighbor_id.type == NodeType.FractionalFlow for outneighbor_id in outflow_ids(graph, node_id) ) + +inflow_edge(graph, node_id)::EdgeMetadata = graph[inflow_id(graph, node_id), node_id] +outflow_edge(graph, node_id)::EdgeMetadata = graph[node_id, outflow_id(graph, node_id)] +outflow_edges(graph, node_id)::Vector{EdgeMetadata} = + [graph[node_id, outflow_id] for outflow_id in outflow_ids(graph, node_id)] + +function set_basin_idxs!(graph::MetaGraph, basin::Basin)::Nothing + for (edge, edge_metadata) in graph.edge_data + id_src, id_dst = edge + edge_metadata = + @set edge_metadata.basin_idx_src = id_index(basin.node_id, id_src)[2] + edge_metadata = + @set edge_metadata.basin_idx_dst = id_index(basin.node_id, id_dst)[2] + graph[edge...] = edge_metadata + end + return nothing +end + +function get_basin_idx(edge_metadata::EdgeMetadata, id::NodeID)::Int32 + (; edge) = edge_metadata + return if edge[1] == id + edge_metadata.basin_idx_src + elseif edge[2] == id + edge_metadata.basin_idx_dst + else + 0 + end +end diff --git a/core/test/data/config_test.toml b/core/test/data/config_test.toml index a3ecfbe44..a1a0d9f80 100644 --- a/core/test/data/config_test.toml +++ b/core/test/data/config_test.toml @@ -3,7 +3,7 @@ endtime = 2019-12-31 crs = "EPSG:28992" input_dir = "../../generated_testmodels/lhm" results_dir = "../../generated_testmodels/lhm" -ribasim_version = "2024.7.0" +ribasim_version = "2024.8.0" [basin] time = "basin/time.arrow" diff --git a/core/test/data/logging_test_loglevel_debug.toml b/core/test/data/logging_test_loglevel_debug.toml index 9c3041d27..884191315 100644 --- a/core/test/data/logging_test_loglevel_debug.toml +++ b/core/test/data/logging_test_loglevel_debug.toml @@ -3,7 +3,7 @@ endtime = 2019-12-31 crs = "EPSG:28992" input_dir = "." results_dir = "results" -ribasim_version = "2024.7.0" +ribasim_version = "2024.8.0" [logging] verbosity = "debug" diff --git a/core/test/data/logging_test_no_loglevel.toml b/core/test/data/logging_test_no_loglevel.toml index 8e44fc276..333c8c2ef 100644 --- a/core/test/data/logging_test_no_loglevel.toml +++ b/core/test/data/logging_test_no_loglevel.toml @@ -3,4 +3,4 @@ endtime = 2019-12-31 crs = "EPSG:28992" input_dir = "." 
results_dir = "results" -ribasim_version = "2024.7.0" +ribasim_version = "2024.8.0" diff --git a/core/test/docs.toml b/core/test/docs.toml index 341eb6111..f59c7b0a8 100644 --- a/core/test/docs.toml +++ b/core/test/docs.toml @@ -12,7 +12,7 @@ crs = "EPSG:4326" # required input_dir = "." # required results_dir = "results" # required -ribasim_version = "2024.7.0" # required +ribasim_version = "2024.8.0" # required # Specific tables can also go into Arrow files rather than the database. # For large tables this can benefit from better compressed file sizes. diff --git a/core/test/utils_test.jl b/core/test/utils_test.jl index ff2520bca..50bfe9076 100644 --- a/core/test/utils_test.jl +++ b/core/test/utils_test.jl @@ -240,49 +240,17 @@ end end @testitem "low_storage_factor" begin - using Ribasim: NodeID, low_storage_factor, Indices - @test low_storage_factor( - [-2.0], - Indices(NodeID.(:Basin, [5])), - NodeID(:Basin, 5), - 2.0, - ) === 0.0 - @test low_storage_factor( - [0.0f0], - Indices(NodeID.(:Basin, [5])), - NodeID(:Basin, 5), - 2.0, - ) === 0.0f0 - @test low_storage_factor( - [0.0], - Indices(NodeID.(:Basin, [5])), - NodeID(:Basin, 5), - 2.0, - ) === 0.0 - @test low_storage_factor( - [1.0f0], - Indices(NodeID.(:Basin, [5])), - NodeID(:Basin, 5), - 2.0, - ) === 0.5f0 - @test low_storage_factor( - [1.0], - Indices(NodeID.(:Basin, [5])), - NodeID(:Basin, 5), - 2.0, - ) === 0.5 - @test low_storage_factor( - [3.0f0], - Indices(NodeID.(:Basin, [5])), - NodeID(:Basin, 5), - 2.0, - ) === 1.0f0 - @test low_storage_factor( - [3.0], - Indices(NodeID.(:Basin, [5])), - NodeID(:Basin, 5), - 2.0, - ) === 1.0 + using Ribasim: NodeID, low_storage_factor, EdgeMetadata, EdgeType + + node_id = NodeID(:Basin, 5) + edge_metadata = EdgeMetadata(0, 0, EdgeType.flow, 0, (node_id, node_id), 1, 1) + @test low_storage_factor([-2.0], edge_metadata, node_id, 2.0) === 0.0 + @test low_storage_factor([0.0f0], edge_metadata, node_id, 2.0) === 0.0f0 + @test low_storage_factor([0.0], edge_metadata, node_id, 2.0) === 0.0 + @test low_storage_factor([1.0f0], edge_metadata, node_id, 2.0) === 0.5f0 + @test low_storage_factor([1.0], edge_metadata, node_id, 2.0) === 0.5 + @test low_storage_factor([3.0f0], edge_metadata, node_id, 2.0) === 1.0f0 + @test low_storage_factor([3.0], edge_metadata, node_id, 2.0) === 1.0 end @testitem "constraints_from_nodes" begin diff --git a/core/test/validation_test.jl b/core/test/validation_test.jl index 7453af40d..c9ad2b9d4 100644 --- a/core/test/validation_test.jl +++ b/core/test/validation_test.jl @@ -86,7 +86,7 @@ end graph[NodeID(:Pump, 6)] = NodeMetadata(:pump, 9) function set_edge_metadata!(id_1, id_2, edge_type) - graph[id_1, id_2] = EdgeMetadata(0, edge_type, 0, (id_1, id_2)) + graph[id_1, id_2] = EdgeMetadata(0, 0, edge_type, 0, (id_1, id_2), -1, -1) return nothing end @@ -163,7 +163,7 @@ end graph[NodeID(:Basin, 7)] = NodeMetadata(:basin, 0) function set_edge_metadata!(id_1, id_2, edge_type) - graph[id_1, id_2] = EdgeMetadata(0, edge_type, 0, (id_1, id_2)) + graph[id_1, id_2] = EdgeMetadata(0, 0, edge_type, 0, (id_1, id_2), -1, -1) return nothing end @@ -453,6 +453,25 @@ end ) end +@testitem "basin indices" begin + using Ribasim: NodeType + + toml_path = normpath(@__DIR__, "../../generated_testmodels/basic/ribasim.toml") + @test ispath(toml_path) + + model = Ribasim.Model(toml_path) + (; graph, basin) = model.integrator.p + for edge_metadata in values(graph.edge_data) + (; edge, basin_idx_src, basin_idx_dst) = edge_metadata + id_src, id_dst = edge + if id_src.type == NodeType.Basin + @test 
id_src == basin.node_id.values[basin_idx_src] + elseif id_dst.type == NodeType.Basin + @test id_dst == basin.node_id.values[basin_idx_dst] + end + end +end + @testitem "Convergence bottleneck" begin using IOCapture: capture toml_path = diff --git a/pixi.toml b/pixi.toml index 37773c580..7ca1a8741 100644 --- a/pixi.toml +++ b/pixi.toml @@ -1,6 +1,6 @@ [project] name = "Ribasim" -version = "2024.7.0" +version = "2024.8.0" description = "Water resources modeling" authors = ["Deltares and contributors "] channels = ["conda-forge"] diff --git a/python/ribasim/ribasim/__init__.py b/python/ribasim/ribasim/__init__.py index 083d10b68..6f0ec385a 100644 --- a/python/ribasim/ribasim/__init__.py +++ b/python/ribasim/ribasim/__init__.py @@ -1,4 +1,4 @@ -__version__ = "2024.7.0" +__version__ = "2024.8.0" from ribasim.config import Allocation, Logging, Node, Solver diff --git a/python/ribasim_api/ribasim_api/__init__.py b/python/ribasim_api/ribasim_api/__init__.py index 7b7f04fa8..8500b8953 100644 --- a/python/ribasim_api/ribasim_api/__init__.py +++ b/python/ribasim_api/ribasim_api/__init__.py @@ -1,4 +1,4 @@ -__version__ = "2024.7.0" +__version__ = "2024.8.0" from ribasim_api.ribasim_api import RibasimApi diff --git a/ribasim_qgis/metadata.txt b/ribasim_qgis/metadata.txt index 9363b34f2..550f1fb2e 100644 --- a/ribasim_qgis/metadata.txt +++ b/ribasim_qgis/metadata.txt @@ -7,7 +7,7 @@ name=Ribasim qgisMinimumVersion=3.0 description=QGIS plugin to setup Ribasim models -version=2024.7.0 +version=2024.8.0 author=Deltares and contributors email=ribasim.info@deltares.nl
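
The central refactor in this patch caches integer indices on each edge's metadata: flow_idx points into the flow vectors, and basin_idx_src/basin_idx_dst point into the Basin arrays, so hot loops such as formulate_du! and low_storage_factor no longer need a Dict or id_index lookup per call. Below is a minimal, self-contained sketch of that indexing pattern; MiniEdge and the helper names are hypothetical stand-ins for illustration, not Ribasim's own types (the real EdgeMetadata carries more fields, and formulate_du! checks node types rather than a zero sentinel).

```julia
# Hypothetical stand-ins for EdgeMetadata and the flow vector, showing only the
# index-caching idea; the real structs also carry AD-aware buffers.
struct MiniEdge
    flow_idx::Int32       # position of this edge in the flow vector
    basin_idx_src::Int32  # index of the source Basin (0 if the source is not a Basin)
    basin_idx_dst::Int32  # index of the destination Basin (0 if not a Basin)
end

set_flow!(flow::Vector{Float64}, edge::MiniEdge, q) = (flow[edge.flow_idx] = q; nothing)
get_flow(flow::Vector{Float64}, edge::MiniEdge) = flow[edge.flow_idx]

# Accumulating du per Basin becomes plain array indexing, in the spirit of the
# new formulate_du! loop over graph.edge_data.
function accumulate_du!(du::Vector{Float64}, flow::Vector{Float64}, edges::Vector{MiniEdge})
    for edge in edges
        q = get_flow(flow, edge)
        edge.basin_idx_src > 0 && (du[edge.basin_idx_src] -= q)  # flow leaving the source basin
        edge.basin_idx_dst > 0 && (du[edge.basin_idx_dst] += q)  # flow entering the destination basin
    end
    return du
end

flow = zeros(2)
edges = [MiniEdge(1, 1, 0), MiniEdge(2, 0, 2)]  # Basin 1 -> connector node -> Basin 2
set_flow!(flow, edges[1], 1.5)
set_flow!(flow, edges[2], 1.5)
accumulate_du!(zeros(2), flow, edges)  # returns [-1.5, 1.5]
```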
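
The callback.jl hunk keeps the trapezoidal accumulation of horizontal flows but reads the previous value through the new get_flow_prev helper instead of the prev keyword. As a free-standing illustration of the rule being applied (a hypothetical one-liner, not part of Ribasim):

```julia
# Trapezoidal rule over one callback interval: the mean of the flow at the start
# and end of the interval, times the interval length, added to the running integral.
integrate_step(flow_integrated, flow_prev, flow, dt) =
    flow_integrated + 0.5 * (flow_prev + flow) * dt

integrate_step(0.0, 1.0, 2.0, 10.0)  # 15.0
```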
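
The rewritten get_area_and_level in util.jl keeps the same piecewise relation but introduces Δarea, Δlevel and Δstorage and drops the unused darea at the profile ends. Between two profile rows the wetted area is assumed linear in level, so storage is the trapezoidal integral of area over level; inverting that integral yields the square-root expression in the diff. A self-contained sketch of that interior branch follows (an illustrative helper assuming the interval endpoints are given, not the Ribasim function itself):

```julia
# Invert storage -> (area, level) between two Basin profile rows, assuming area
# varies linearly with level on that interval (same formulas as in the diff).
function area_level_between(storage, storage_lower, storage_higher,
                            area_lower, area_higher, level_lower, level_higher)
    Δarea = area_higher - area_lower
    Δlevel = level_higher - level_lower
    Δstorage = storage_higher - storage_lower
    if Δarea ≈ 0.0
        # Constant area means level is linear in storage
        area = area_lower
        level = level_lower + Δlevel * (storage - storage_lower) / Δstorage
    else
        darea = Δarea / Δlevel
        area = sqrt(area_lower^2 + 2 * (storage - storage_lower) * darea)
        level = level_lower + Δlevel * (area - area_lower) / Δarea
    end
    return area, level
end

# A profile going from area 10 m² at level 0 m to 30 m² at level 2 m holds
# (10 + 30) / 2 * 2 = 40 m³; half of that storage sits at level ≈ 1.24 m.
area_level_between(20.0, 0.0, 40.0, 10.0, 30.0, 0.0, 2.0)  # ≈ (22.36, 1.236)
```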