diff --git a/build/libribasim/Manifest.toml b/build/libribasim/Manifest.toml index 5db3c6ffd..95db712d6 100644 --- a/build/libribasim/Manifest.toml +++ b/build/libribasim/Manifest.toml @@ -979,7 +979,7 @@ uuid = "ae029012-a4dd-5104-9daa-d747884805df" version = "1.3.0" [[deps.Ribasim]] -deps = ["Arrow", "BasicModelInterface", "CodecLz4", "CodecZstd", "ComponentArrays", "Configurations", "DBInterface", "DataInterpolations", "DataStructures", "Dates", "Dictionaries", "DiffEqCallbacks", "FiniteDiff", "ForwardDiff", "Graphs", "HiGHS", "IterTools", "JuMP", "Legolas", "Logging", "LoggingExtras", "OrdinaryDiffEq", "PreallocationTools", "SQLite", "SciMLBase", "SparseArrays", "StructArrays", "Tables", "TerminalLoggers", "TimerOutputs", "TranscodingStreams"] +deps = ["Arrow", "BasicModelInterface", "CodecLz4", "CodecZstd", "ComponentArrays", "Configurations", "DBInterface", "DataInterpolations", "DataStructures", "Dates", "Dictionaries", "DiffEqCallbacks", "FiniteDiff", "ForwardDiff", "Graphs", "HiGHS", "IterTools", "JuMP", "Legolas", "Logging", "LoggingExtras", "OrdinaryDiffEq", "PreallocationTools", "SQLite", "SciMLBase", "SparseArrays", "StructArrays", "Tables", "TerminalLoggers", "TimeZones", "TimerOutputs", "TranscodingStreams"] path = "../../core" uuid = "aac5e3d9-0b8f-4d4f-8241-b1a7a9632635" version = "0.3.0" diff --git a/build/ribasim_cli/Manifest.toml b/build/ribasim_cli/Manifest.toml index 5885340d8..873b83b61 100644 --- a/build/ribasim_cli/Manifest.toml +++ b/build/ribasim_cli/Manifest.toml @@ -979,7 +979,7 @@ uuid = "ae029012-a4dd-5104-9daa-d747884805df" version = "1.3.0" [[deps.Ribasim]] -deps = ["Arrow", "BasicModelInterface", "CodecLz4", "CodecZstd", "ComponentArrays", "Configurations", "DBInterface", "DataInterpolations", "DataStructures", "Dates", "Dictionaries", "DiffEqCallbacks", "FiniteDiff", "ForwardDiff", "Graphs", "HiGHS", "IterTools", "JuMP", "Legolas", "Logging", "LoggingExtras", "OrdinaryDiffEq", "PreallocationTools", "SQLite", "SciMLBase", 
"SparseArrays", "StructArrays", "Tables", "TerminalLoggers", "TimerOutputs", "TranscodingStreams"] +deps = ["Arrow", "BasicModelInterface", "CodecLz4", "CodecZstd", "ComponentArrays", "Configurations", "DBInterface", "DataInterpolations", "DataStructures", "Dates", "Dictionaries", "DiffEqCallbacks", "FiniteDiff", "ForwardDiff", "Graphs", "HiGHS", "IterTools", "JuMP", "Legolas", "Logging", "LoggingExtras", "OrdinaryDiffEq", "PreallocationTools", "SQLite", "SciMLBase", "SparseArrays", "StructArrays", "Tables", "TerminalLoggers", "TimeZones", "TimerOutputs", "TranscodingStreams"] path = "../../core" uuid = "aac5e3d9-0b8f-4d4f-8241-b1a7a9632635" version = "0.3.0" diff --git a/core/Manifest.toml b/core/Manifest.toml index 073418681..6dfa5a16b 100644 --- a/core/Manifest.toml +++ b/core/Manifest.toml @@ -16,9 +16,9 @@ version = "0.4.4" [[deps.Adapt]] deps = ["LinearAlgebra", "Requires"] -git-tree-sha1 = "76289dc51920fdc6e0013c872ba9551d54961c24" +git-tree-sha1 = "02f731463748db57cc2ebfbd9fbc9ce8280d3433" uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" -version = "3.6.2" +version = "3.7.1" weakdeps = ["StaticArrays"] [deps.Adapt.extensions] @@ -188,9 +188,9 @@ version = "1.0.5+0" [[deps.ComponentArrays]] deps = ["ArrayInterface", "ChainRulesCore", "ForwardDiff", "Functors", "LinearAlgebra", "PackageExtensionCompat", "StaticArrayInterface", "StaticArraysCore"] -git-tree-sha1 = "3ee12062466a73b5f9c04113375e4934f174bc2c" +git-tree-sha1 = "f1ac18d8d4e8a6303d6d2d1c870528c3c52f0895" uuid = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" -version = "0.15.3" +version = "0.15.4" [deps.ComponentArrays.extensions] ComponentArraysAdaptExt = "Adapt" @@ -300,9 +300,9 @@ version = "0.3.25" [[deps.DiffEqBase]] deps = ["ArrayInterface", "ChainRulesCore", "DataStructures", "DocStringExtensions", "EnumX", "EnzymeCore", "FastBroadcast", "ForwardDiff", "FunctionWrappers", "FunctionWrappersWrappers", "LinearAlgebra", "Logging", "Markdown", "MuladdMacro", "Parameters", "PreallocationTools", 
"PrecompileTools", "Printf", "RecursiveArrayTools", "Reexport", "Requires", "SciMLBase", "SciMLOperators", "Setfield", "SparseArrays", "Static", "StaticArraysCore", "Statistics", "Tricks", "TruncatedStacktraces", "ZygoteRules"] -git-tree-sha1 = "95b6df71e218379a831874215b0effaac791d7d7" +git-tree-sha1 = "36a590efdbee58b38f903ffc3b378f4a5336bc3f" uuid = "2b5f629d-d688-5b77-993f-72d75c75574e" -version = "6.133.1" +version = "6.134.0" [deps.DiffEqBase.extensions] DiffEqBaseDistributionsExt = "Distributions" @@ -501,9 +501,9 @@ version = "1.9.0" [[deps.HiGHS]] deps = ["HiGHS_jll", "MathOptInterface", "PrecompileTools", "SparseArrays"] -git-tree-sha1 = "9d75ef949c17a2a150b91b8365a6e5bc43a2a0d3" +git-tree-sha1 = "fce13308f09771b160232903cad57be39a8a0ebb" uuid = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" -version = "1.7.3" +version = "1.7.5" [[deps.HiGHS_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl"] @@ -577,9 +577,9 @@ version = "0.21.4" [[deps.JuMP]] deps = ["LinearAlgebra", "MacroTools", "MathOptInterface", "MutableArithmetics", "OrderedCollections", "Printf", "SnoopPrecompile", "SparseArrays"] -git-tree-sha1 = "3700a700bc80856fe673b355123ae4574f2d5dfe" +git-tree-sha1 = "25b2fcda4d455b6f93ac753730d741340ba4a4fe" uuid = "4076af6c-e467-56ae-b986-b466b2749572" -version = "1.15.1" +version = "1.16.0" [deps.JuMP.extensions] JuMPDimensionalDataExt = "DimensionalData" @@ -661,9 +661,9 @@ uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" [[deps.LinearSolve]] deps = ["ArrayInterface", "ConcreteStructs", "DocStringExtensions", "EnumX", "EnzymeCore", "FastLapackInterface", "GPUArraysCore", "InteractiveUtils", "KLU", "Krylov", "Libdl", "LinearAlgebra", "MKL_jll", "PrecompileTools", "Preferences", "RecursiveFactorization", "Reexport", "Requires", "SciMLBase", "SciMLOperators", "Setfield", "SparseArrays", "Sparspak", "SuiteSparse", "UnPack"] -git-tree-sha1 = "31353ba539d14a342908f765407abffd8db5f3c7" +git-tree-sha1 = 
"a563cd835c9ed5295c35a7d140e0bf0a514bb10a" uuid = "7ed4a6bd-45f5-4d41-b270-4a48e9bafcae" -version = "2.11.0" +version = "2.13.0" [deps.LinearSolve.extensions] LinearSolveBandedMatricesExt = "BandedMatrices" @@ -756,9 +756,9 @@ uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" [[deps.MathOptInterface]] deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "DataStructures", "ForwardDiff", "JSON", "LinearAlgebra", "MutableArithmetics", "NaNMath", "OrderedCollections", "PrecompileTools", "Printf", "SparseArrays", "SpecialFunctions", "Test", "Unicode"] -git-tree-sha1 = "5c9f1e635e8d491297e596b56fec1c95eafb95a3" +git-tree-sha1 = "13b3d40084d04e609e0509730f05215fb2a2fba4" uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" -version = "1.20.1" +version = "1.21.0" [[deps.MbedTLS_jll]] deps = ["Artifacts", "Libdl"] @@ -813,9 +813,9 @@ version = "1.2.0" [[deps.NonlinearSolve]] deps = ["ADTypes", "ArrayInterface", "ConcreteStructs", "DiffEqBase", "EnumX", "FiniteDiff", "ForwardDiff", "LineSearches", "LinearAlgebra", "LinearSolve", "PrecompileTools", "RecursiveArrayTools", "Reexport", "SciMLBase", "SimpleNonlinearSolve", "SparseArrays", "SparseDiffTools", "StaticArraysCore", "UnPack"] -git-tree-sha1 = "ee92770e0832314ccd424d83a0ab4c75fc6dc91f" +git-tree-sha1 = "9203b3333c9610664de2e8cbc23cfd726663df7d" uuid = "8913a72c-1f9b-4ce2-8d82-65094dcecaec" -version = "2.3.0" +version = "2.4.0" [deps.NonlinearSolve.extensions] NonlinearSolveFastLevenbergMarquardtExt = "FastLevenbergMarquardt" @@ -1021,17 +1021,19 @@ version = "3.43.0+0" [[deps.SciMLBase]] deps = ["ADTypes", "ArrayInterface", "ChainRulesCore", "CommonSolve", "ConstructionBase", "Distributed", "DocStringExtensions", "EnumX", "FillArrays", "FunctionWrappersWrappers", "IteratorInterfaceExtensions", "LinearAlgebra", "Logging", "Markdown", "PrecompileTools", "Preferences", "RecipesBase", "RecursiveArrayTools", "Reexport", "RuntimeGeneratedFunctions", "SciMLOperators", "StaticArraysCore", "Statistics", "SymbolicIndexingInterface", 
"Tables", "TruncatedStacktraces", "ZygoteRules"] -git-tree-sha1 = "6134c8970f82f23c43d3580d79c3e47acf232083" +git-tree-sha1 = "1c2a4e245744dd76b2eb677d3535ffad16d8b989" uuid = "0bca4576-84f4-4d90-8ffe-ffa030f20462" -version = "2.4.1" +version = "2.5.0" [deps.SciMLBase.extensions] + SciMLBasePartialFunctionsExt = "PartialFunctions" SciMLBasePyCallExt = "PyCall" SciMLBasePythonCallExt = "PythonCall" SciMLBaseRCallExt = "RCall" SciMLBaseZygoteExt = "Zygote" [deps.SciMLBase.weakdeps] + PartialFunctions = "570af359-4316-4cb7-8c74-252c00c2016b" PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" PythonCall = "6099a3de-0909-46bc-b1f4-468b9a2dfc0d" RCall = "6f49c342-dc21-5d91-9882-a32aef131414" @@ -1076,9 +1078,9 @@ uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" [[deps.SimpleNonlinearSolve]] deps = ["ArrayInterface", "DiffEqBase", "FiniteDiff", "ForwardDiff", "LinearAlgebra", "PackageExtensionCompat", "PrecompileTools", "Reexport", "SciMLBase", "StaticArraysCore"] -git-tree-sha1 = "e308d089f5d0e733a017b61784c5813e672f760d" +git-tree-sha1 = "15ff97fa4881133caa324dacafe28b5ac47ad8a2" uuid = "727e6d20-b764-4bd8-a329-72de5adea6c7" -version = "0.1.22" +version = "0.1.23" [deps.SimpleNonlinearSolve.extensions] SimpleNonlinearSolveNNlibExt = "NNlib" @@ -1231,9 +1233,9 @@ version = "1.0.1" [[deps.Tables]] deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits"] -git-tree-sha1 = "a1f34829d5ac0ef499f6d84428bd6b4c71f02ead" +git-tree-sha1 = "cb76cf677714c095e535e3501ac7954732aeea2d" uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" -version = "1.11.0" +version = "1.11.1" [[deps.Tar]] deps = ["ArgTools", "SHA"] @@ -1338,9 +1340,9 @@ version = "1.5.5+0" [[deps.ZygoteRules]] deps = ["ChainRulesCore", "MacroTools"] -git-tree-sha1 = "977aed5d006b840e2e40c0b48984f7463109046d" +git-tree-sha1 = "9d749cd449fb448aeca4feee9a2f4186dbb5d184" uuid = "700de1a5-db45-46bc-99cf-38207098b444" -version = "0.2.3" +version = "0.2.4" 
[[deps.libblastrampoline_jll]] deps = ["Artifacts", "Libdl"] diff --git a/core/src/Ribasim.jl b/core/src/Ribasim.jl index 6041bb6f3..8a6f28765 100644 --- a/core/src/Ribasim.jl +++ b/core/src/Ribasim.jl @@ -17,6 +17,7 @@ module Ribasim import IterTools import BasicModelInterface as BMI import HiGHS +import JuMP import JuMP.Model as JuMPModel import TranscodingStreams @@ -43,7 +44,6 @@ using Graphs: outneighbors, rem_edge! -using JuMP: @variable, @constraint, @objective, set_normalized_rhs, optimize!, value using Legolas: Legolas, @schema, @version, validate, SchemaVersion, declared using Logging: current_logger, min_enabled_level, with_logger using LoggingExtras: EarlyFilteredLogger, LevelOverrideLogger diff --git a/core/src/allocation.jl b/core/src/allocation.jl index 082c6d39d..2f06032f9 100644 --- a/core/src/allocation.jl +++ b/core/src/allocation.jl @@ -382,7 +382,7 @@ function add_variables_flow!( allocgraph_edges::Vector{Edge{Int}}, )::Nothing n_flows = length(allocgraph_edges) - problem[:F] = @variable(problem, F[1:n_flows] >= 0.0) + problem[:F] = JuMP.@variable(problem, F[1:n_flows] >= 0.0) return nothing end @@ -401,7 +401,7 @@ function add_variables_allocation_user!( allocgraph_node_id_user = allocgraph_edges[allocgraph_edge_id_user_demand].dst base_name = "A_user_$allocgraph_node_id_user" problem[Symbol(base_name)] = - @variable(problem, [1:length(user.priorities)], base_name = base_name) + JuMP.@variable(problem, [1:length(user.priorities)], base_name = base_name) end return nothing end @@ -416,7 +416,7 @@ function add_variables_allocation_basin!( node_id_mapping::Dict{Int, Tuple{Int, Symbol}}, allocgraph_node_ids_basin::Vector{Int}, )::Nothing - @variable(problem, A_basin[i = allocgraph_node_ids_basin] >= 0.0) + JuMP.@variable(problem, A_basin[i = allocgraph_node_ids_basin] >= 0.0) return nothing end @@ -447,16 +447,16 @@ function add_constraints_user_allocation!( base_name = "A_user_$allocgraph_node_id_user" A_user = problem[Symbol(base_name)] # Sum of 
allocations to user is total flow to user - @constraint( + JuMP.@constraint( problem, sum(A_user) == F[allocgraph_edge_id_user_demand], base_name = "allocation_sum[$allocgraph_node_id_user]" ) # Allocation flows are non-negative - @constraint(problem, [p = 1:length(user.priorities)], A_user[p] >= 0) + JuMP.@constraint(problem, [p = 1:length(user.priorities)], A_user[p] >= 0) # Allocation flows are bounded from above by demands base_name = "demand_user_$allocgraph_node_id_user" - problem[Symbol(base_name)] = @constraint( + problem[Symbol(base_name)] = JuMP.@constraint( problem, [p = 1:length(user.priorities)], A_user[p] <= 0, @@ -480,7 +480,7 @@ function add_constraints_basin_allocation!( allocgraph_node_ids_basin::Vector{Int}, )::Nothing A_basin = problem[:A_basin] - problem[:basin_allocation] = @constraint( + problem[:basin_allocation] = JuMP.@constraint( problem, [i = allocgraph_node_ids_basin], A_basin[i] <= 0.0, @@ -509,7 +509,7 @@ function add_constraints_capacity!( push!(allocgraph_edge_ids_finite_capacity, i) end end - problem[:capacity] = @constraint( + problem[:capacity] = JuMP.@constraint( problem, [i = allocgraph_edge_ids_finite_capacity], F[i] <= capacity[allocgraph_edges[i].src, allocgraph_edges[i].dst], @@ -533,7 +533,7 @@ function add_constraints_source!( graph_allocation::DiGraph{Int}, )::Nothing F = problem[:F] - problem[:source] = @constraint( + problem[:source] = JuMP.@constraint( problem, [i = keys(source_edge_mapping)], F[findfirst( @@ -559,7 +559,7 @@ function add_constraints_flow_conservation!( allocgraph_node_outedge_ids::Dict{Int, Vector{Int}}, )::Nothing F = problem[:F] - problem[:flow_conservation] = @constraint( + problem[:flow_conservation] = JuMP.@constraint( problem, [i = allocgraph_node_ids_basin], sum([ @@ -585,7 +585,7 @@ function add_constraints_user_returnflow!( )::Nothing F = problem[:F] - problem[:return_flow] = @constraint( + problem[:return_flow] = JuMP.@constraint( problem, [i = allocgraph_node_ids_user_with_returnflow], 
F[only(allocgraph_node_outedge_ids[i])] == @@ -612,7 +612,7 @@ function add_objective_function!( problem[Symbol("A_user_$allocgraph_node_id_user")] for allocgraph_node_id_user in allocgraph_node_ids_user ] - @objective( + JuMP.@objective( problem, Max, sum(A_basin) + sum([ @@ -647,7 +647,8 @@ function allocation_problem( allocgraph_node_inedge_ids, allocgraph_node_outedge_ids = get_node_in_out_edges(graph_allocation) - problem = JuMPModel(HiGHS.Optimizer) + optimizer = JuMP.optimizer_with_attributes(HiGHS.Optimizer, "log_to_console" => false) + problem = JuMPModel(optimizer) # Add variables to problem add_variables_flow!(problem, allocgraph_edges) @@ -778,7 +779,7 @@ function set_model_state_in_allocation!( base_name = "demand_user_$allocgraph_node_id" constraints_demand = problem[Symbol(base_name)] for priority_idx in eachindex(priorities) - set_normalized_rhs( + JuMP.set_normalized_rhs( constraints_demand[priority_idx], demand_node[priority_idx](t), ) @@ -787,7 +788,7 @@ function set_model_state_in_allocation!( subnetwork_edge = source_edge_mapping[allocgraph_node_id] subnetwork_node_ids = edge_ids_flow_inv[subnetwork_edge] constraint_source = problem[:source][allocgraph_node_id] - set_normalized_rhs(constraint_source, flow[subnetwork_node_ids]) + JuMP.set_normalized_rhs(constraint_source, flow[subnetwork_node_ids]) elseif allocgraph_node_type == :basin # TODO: Compute basin flow from vertical fluxes and basin volume. 
# Set as basin demand if the net flow is negative, set as source @@ -810,7 +811,7 @@ function assign_allocations!(allocation_model::AllocationModel, user::User)::Not if allocgraph_node_type == :user user_idx = findsorted(user.node_id, subnetwork_node_id) base_name = "A_user_$allocgraph_node_id" - user.allocated[user_idx] .= value.(problem[Symbol(base_name)]) + user.allocated[user_idx] .= JuMP.value.(problem[Symbol(base_name)]) end end return nothing @@ -821,11 +822,17 @@ Update the allocation optimization problem for the given subnetwork with the pro and flows, solve the allocation problem and assign the results to the users. """ function allocate!(p::Parameters, allocation_model::AllocationModel, t::Float64)::Nothing + (; problem) = allocation_model + # Update allocation problem with data from main model set_model_state_in_allocation!(allocation_model, p, t) # Solve the allocation problem - optimize!(allocation_model.problem) + JuMP.optimize!(problem) + @debug JuMP.solution_summary(problem) + if JuMP.termination_status(problem) !== JuMP.OPTIMAL + error("Allocation couldn't find optimal solution.") + end # Assign the allocations to the users assign_allocations!(allocation_model, p.user) diff --git a/core/src/bmi.jl b/core/src/bmi.jl index 815f65955..1c762ecd5 100644 --- a/core/src/bmi.jl +++ b/core/src/bmi.jl @@ -60,17 +60,6 @@ function BMI.initialize(T::Type{Model}, config::Config)::Model error("Invalid fractional flow node combinations found.") end - for id in pid_control.node_id - id_controlled = only(outneighbors(connectivity.graph_control, id)) - pump_idx = findsorted(pump.node_id, id_controlled) - if pump_idx === nothing - outlet_idx = findsorted(outlet.node_id, id_controlled) - outlet.is_pid_controlled[outlet_idx] = true - else - pump.is_pid_controlled[pump_idx] = true - end - end - # tell the solver to stop when new data comes in # TODO add all time tables here time_flow_boundary = load_structvector(db, config, FlowBoundaryTimeV1) @@ -116,7 +105,7 @@ 
function BMI.initialize(T::Type{Model}, config::Config)::Model end @debug "Setup ODEProblem." - callback, saved_flow = create_callbacks(parameters; config.solver.saveat) + callback, saved_flow = create_callbacks(parameters, config; config.solver.saveat) @debug "Created callbacks." # Initialize the integrator, providing all solver options as described in @@ -209,7 +198,8 @@ are combined to a CallbackSet that goes to the integrator. Returns the CallbackSet and the SavedValues for flow. """ function create_callbacks( - parameters; + parameters::Parameters, + config::Config; saveat, )::Tuple{CallbackSet, SavedValues{Float64, Vector{Float64}}} (; starttime, basin, tabulated_rating_curve, discrete_control) = parameters @@ -223,6 +213,15 @@ function create_callbacks( tabulated_rating_curve_cb = PresetTimeCallback(tstops, update_tabulated_rating_curve!) push!(callbacks, tabulated_rating_curve_cb) + if config.allocation.use_allocation + allocation_cb = PeriodicCallback( + update_allocation!, + config.allocation.timestep; + initial_affect = true, + ) + push!(callbacks, allocation_cb) + end + # save the flows over time, as a Vector of the nonzeros(flow) saved_flow = SavedValues(Float64, Vector{Float64}) save_flow_cb = SavingCallback(save_flow, saved_flow; saveat, save_start = false) @@ -502,6 +501,14 @@ function update_basin(integrator)::Nothing return nothing end +"Solve the allocation problem for all users and assign allocated abstractions to user nodes." 
+function update_allocation!(integrator)::Nothing + (; p, t) = integrator + for allocation_model in integrator.p.connectivity.allocation_models + allocate!(p, allocation_model, t) + end +end + "Load updates from 'TabulatedRatingCurve / time' into the parameters" function update_tabulated_rating_curve!(integrator)::Nothing (; node_id, tables, time) = integrator.p.tabulated_rating_curve diff --git a/core/src/config.jl b/core/src/config.jl index 04da2a593..d9743b446 100644 --- a/core/src/config.jl +++ b/core/src/config.jl @@ -122,13 +122,15 @@ end timing::Bool = false end +@option struct Allocation <: TableOption + timestep::Union{Float64, Nothing} = nothing + use_allocation::Bool = false +end + @option @addnodetypes struct Config <: TableOption starttime::DateTime endtime::DateTime - # [s] Δt for periodic update frequency, including user horizons - update_timestep::Float64 = 60 * 60 * 24.0 - # optional, when Config is created from a TOML file, this is its directory relative_dir::String = "." # ignored(!) input_dir::String = "." 
@@ -137,12 +139,12 @@ end # input, required database::String - # results, required - results::Results = Results() - + allocation::Allocation = Allocation() solver::Solver = Solver() - logging::Logging = Logging() + + # results, required + results::Results = Results() end function Configurations.from_dict(::Type{Logging}, ::Type{LogLevel}, level::AbstractString) diff --git a/core/src/create.jl b/core/src/create.jl index 2513d0a37..67c8dc248 100644 --- a/core/src/create.jl +++ b/core/src/create.jl @@ -230,7 +230,6 @@ function Connectivity(db::DB, config::Config, chunk_size::Int)::Connectivity flow = FixedSizeDiffCache(flow, chunk_size) end - # TODO: Create allocation models from input here allocation_models = AllocationModel[] return Connectivity( @@ -246,6 +245,53 @@ function Connectivity(db::DB, config::Config, chunk_size::Int)::Connectivity ) end +function generate_allocation_models!(p::Parameters, db::DB, config::Config)::Nothing + (; connectivity) = p + node = load_structvector(db, config, NodeV1) + edge = load_structvector(db, config, EdgeV1) + + # coalesce control_state to nothing to avoid boolean groupby logic on missing + allocation_groups_node = + IterTools.groupby(row -> coalesce(row.allocation_network_id, nothing), node) + allocation_groups_edge = + IterTools.groupby(row -> coalesce(row.allocation_network_id, nothing), edge) + + allocation_groups_node_dict = Dict{Int, StructVector}() + allocation_groups_edge_dict = Dict{Int, StructVector}() + + for allocation_group_node in allocation_groups_node + allocation_network_id = first(allocation_group_node).allocation_network_id + if !ismissing(allocation_network_id) + allocation_groups_node_dict[allocation_network_id] = allocation_group_node + end + end + for allocation_group_edge in allocation_groups_edge + allocation_network_id = first(allocation_group_edge).allocation_network_id + if !ismissing(allocation_network_id) + allocation_groups_edge_dict[allocation_network_id] = allocation_group_edge + end + end + + 
for (allocation_network_id, allocation_group_node) in allocation_groups_node_dict + allocation_group_edge = get( + allocation_groups_edge_dict, + allocation_network_id, + StructVector{EdgeV1}(undef, 0), + ) + source_edge_ids = [row.fid for row in allocation_group_edge] + push!( + connectivity.allocation_models, + AllocationModel( + p, + allocation_group_node.fid, + source_edge_ids, + config.allocation.timestep, + ), + ) + end + return nothing +end + function LinearResistance(db::DB, config::Config)::LinearResistance static = load_structvector(db, config, LinearResistanceStaticV1) parsed_parameters, valid = parse_static_and_time(db, config, "LinearResistance"; static) @@ -766,6 +812,18 @@ function Parameters(db::DB, config::Config)::Parameters basin = Basin(db, config, chunk_size) + # Set is_pid_controlled to true for those pumps and outlets that are PID controlled + for id in pid_control.node_id + id_controlled = only(outneighbors(connectivity.graph_control, id)) + pump_idx = findsorted(pump.node_id, id_controlled) + if pump_idx === nothing + outlet_idx = findsorted(outlet.node_id, id_controlled) + outlet.is_pid_controlled[outlet_idx] = true + else + pump.is_pid_controlled[pump_idx] = true + end + end + p = Parameters( config.starttime, connectivity, @@ -791,5 +849,9 @@ function Parameters(db::DB, config::Config)::Parameters end end end + # Allocation data structures + if config.allocation.use_allocation + generate_allocation_models!(p, db, config) + end return p end diff --git a/core/src/validation.jl b/core/src/validation.jl index f67a955e0..4d1bcd2f5 100644 --- a/core/src/validation.jl +++ b/core/src/validation.jl @@ -137,11 +137,11 @@ n_neighbor_bounds_control(::Val{:User}) = n_neighbor_bounds(0, 0, 0, 0) n_neighbor_bounds_control(nodetype) = error("'n_neighbor_bounds_control' not defined for $nodetype.") -# TODO NodeV1 and EdgeV1 are not yet used @version NodeV1 begin fid::Int name::String = isnothing(s) ? 
"" : String(s) type::String = in(Symbol(type), nodetypes) ? type : error("Unknown node type $type") + allocation_network_id::Union{Missing, Int} end @version EdgeV1 begin @@ -150,6 +150,7 @@ end from_node_id::Int to_node_id::Int edge_type::String + allocation_network_id::Union{Missing, Int} end @version PumpStaticV1 begin diff --git a/core/test/allocation.jl b/core/test/allocation.jl index 1e892ab8e..556fb5f71 100644 --- a/core/test/allocation.jl +++ b/core/test/allocation.jl @@ -1,6 +1,7 @@ import Ribasim +import JuMP using SQLite -using JuMP: value +using PreallocationTools: get_tmp @testset "Allocation solve" begin toml_path = normpath(@__DIR__, "../../generated_testmodels/subnetwork/ribasim.toml") @@ -12,23 +13,16 @@ using JuMP: value p = Ribasim.Parameters(db, cfg) close(db) - # Inputs specific for this test model - subgraph_node_ids = unique(keys(p.lookup)) - source_edge_ids = [p.connectivity.edge_ids_flow[(1, 2)]] - flow = Ribasim.get_tmp(p.connectivity.flow, 0) + flow = get_tmp(p.connectivity.flow, 0) flow[1, 2] = 4.5 # Source flow - Δt_allocation = 24.0 * 60^2 - t = 0.0 + allocation_model = p.connectivity.allocation_models[1] + Ribasim.allocate!(p, allocation_model, 0.0) - allocation_model = - Ribasim.AllocationModel(p, subgraph_node_ids, source_edge_ids, Δt_allocation) - Ribasim.allocate!(p, allocation_model, t) - - F = value.(allocation_model.problem[:F]) - @test F ≈ [3.0, 3.0, 0.5, 3.5, 1.0, 4.5] + F = JuMP.value.(allocation_model.problem[:F]) + @test F ≈ [4.0, 0.5, 0.0, 4.0, -0.0, 4.5] allocated = p.user.allocated - @test allocated[1] ≈ [0.0, 1.0, 0.0] - @test allocated[2] ≈ [0.0, 0.5, 0.0] - @test allocated[3] ≈ [3.0, 0.0, 0.0] + @test allocated[1] ≈ [0.0, 0.5] + @test allocated[2] ≈ [4.0, 0.0] + @test allocated[3] ≈ [0.0, 0.0] end diff --git a/core/test/config.jl b/core/test/config.jl index 7c935d1d1..03cc9bc37 100644 --- a/core/test/config.jl +++ b/core/test/config.jl @@ -18,7 +18,6 @@ using CodecZstd: ZstdCompressor @testset "testrun" begin config 
= Ribasim.Config(normpath(@__DIR__, "testrun.toml")) @test config isa Ribasim.Config - @test config.update_timestep == 86400.0 @test config.endtime > config.starttime @test config.solver == Ribasim.Solver(; saveat = 86400.0) @test config.results.compression == Ribasim.zstd diff --git a/core/test/docs.toml b/core/test/docs.toml index fb6c8f0ec..6b9124cf0 100644 --- a/core/test/docs.toml +++ b/core/test/docs.toml @@ -3,26 +3,19 @@ starttime = 2019-01-01 # required endtime = 2021-01-01 # required -# all timesteps are in seconds -update_timestep = 86400.0 # optional, default 1 day - # input files database = "database.gpkg" # required -[results] -# These results files are always written -flow = "results/flow.arrow" # optional, default "results/flow.arrow" -basin = "results/basin.arrow" # optional, default "results/basin.arrow" -control = "results/control.arrow" # optional, default "results/control.arrow" -compression = "zstd" # optional, default "zstd", also supports "lz4" -compression_level = 6 # optional, default 6 - # Specific tables can also go into Arrow files rather than the database. # For large tables this can benefit from better compressed file sizes. # This is optional, tables are retrieved from the database if not specified in the TOML. 
[basin] time = "basin/time.arrow" +[allocation] +timestep = 86400 +use_allocation = true + [solver] algorithm = "QNDF" # optional, default "QNDF" saveat = [] # optional, default [], which saves every timestep @@ -41,3 +34,11 @@ autodiff = true # optional, default true # defines the logging level of Ribasim verbosity = "info" # optional, default "info", can otherwise be "debug", "warn" or "error" timing = false # optional, whether to log debug timing statements + +[results] +# These results files are always written +flow = "results/flow.arrow" # optional, default "results/flow.arrow" +basin = "results/basin.arrow" # optional, default "results/basin.arrow" +control = "results/control.arrow" # optional, default "results/control.arrow" +compression = "zstd" # optional, default "zstd", also supports "lz4" +compression_level = 6 # optional, default 6 diff --git a/core/test/testrun.toml b/core/test/testrun.toml index 608baf802..10b04b46e 100644 --- a/core/test/testrun.toml +++ b/core/test/testrun.toml @@ -1,9 +1,6 @@ starttime = 2019-01-01 endtime = 2019-12-31 -# [s] Δt for periodic update frequency, including user horizons -update_timestep = 86400.0 - # optional, default is the path of the TOML input_dir = "../generated_testmodels/lhm" results_dir = "../generated_testmodels/lhm" diff --git a/docs/core/usage.qmd b/docs/core/usage.qmd index 65c3b3659..a0a5e18bb 100644 --- a/docs/core/usage.qmd +++ b/docs/core/usage.qmd @@ -66,6 +66,11 @@ The total maximum number of iterations `maxiters = 1e9`, can normally stay as-is The absolute and relative tolerance for adaptive timestepping can be set with `abstol` and `reltol`. For more information on these and other solver options, see the [DifferentialEquations.jl docs](https://docs.sciml.ai/DiffEqDocs/latest/basics/common_solver_opts/#solver_options). 
+## Allocation settings +Currently there are the following allocation settings: +- `use_allocation`: A boolean which says whether allocation should be used or not; +- `timestep`: a float value in seconds which dictates the update interval for allocations. + # GeoPackage database and Arrow tables The input and output tables described below all share that they are tabular files. The Node @@ -119,11 +124,13 @@ in some way. Counter intuitively, even systems you may think of as edges, such a are nodes in Ribasim. This is because edges only define direct instantaneous couplings between nodes, and never have storage of their own. -column | type | restriction ---------- | -------- | ----------- -fid | Int | unique, sorted -type | String | known node type -geometry | geoarrow | (optional) +column | type | restriction +--------------------- | -------- | ----------- +fid | Int | unique, sorted +type | String | known node type +geometry | geoarrow | (optional) +name | String | (optional, does not have to be unique) +allocation_network_id | Int | (optional) The available node types as of this writing are listed as the top level bullets below. The sub-bullets indicate which tables are associated to the node type. The table name is the @@ -186,13 +193,15 @@ There are currently 2 possible edge types: Control edges should always point away from the control node. 
The edges between the control node and the nodes it *listens* to are *not* present in `Edge \ static`, these are defined in [Control / condition](usage.qmd#sec-condition) -column | type | restriction --------------- | -------- | ----------- -fid | Int | unique, sorted -from_node_id | Int | - -to_node_id | Int | - -edge_type | String | must be "flow" or "control" -geom | geometry | (optional) +column | type | restriction +--------------------- | -------- | ----------- +fid | Int | unique, sorted +from_node_id | Int | - +to_node_id | Int | - +edge_type | String | must be "flow" or "control" +geom | geometry | (optional) +name | String | (optional, does not have to be unique) +allocation_network_id | Int | (optional, denotes source in allocation network) Similarly to the node table, you can use a geometry to visualize the connections between the nodes in QGIS. For instance, you can draw a line connecting the two node coordinates. diff --git a/docs/schema/Allocation.schema.json b/docs/schema/Allocation.schema.json new file mode 100644 index 000000000..d81c4423d --- /dev/null +++ b/docs/schema/Allocation.schema.json @@ -0,0 +1,29 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://deltares.github.io/Ribasim/schema/Allocation.schema.json", + "title": "Allocation", + "description": "A Allocation object based on Ribasim.config.Allocation", + "type": "object", + "properties": { + "timestep": { + "format": "default", + "anyOf": [ + { + "type": "null" + }, + { + "type": "number" + } + ], + "default": null + }, + "use_allocation": { + "format": "default", + "type": "boolean", + "default": false + } + }, + "required": [ + "use_allocation" + ] +} diff --git a/docs/schema/Config.schema.json b/docs/schema/Config.schema.json index 58dea9967..446f9c99a 100644 --- a/docs/schema/Config.schema.json +++ b/docs/schema/Config.schema.json @@ -13,11 +13,6 @@ "format": "date-time", "type": "string" }, - "update_timestep": { - "format": "double", - "type": 
"number", - "default": 86400 - }, "relative_dir": { "format": "default", "type": "string", @@ -37,15 +32,11 @@ "format": "default", "type": "string" }, - "results": { - "$ref": "https://deltares.github.io/Ribasim/schema/Results.schema.json", + "allocation": { + "$ref": "https://deltares.github.io/Ribasim/schema/Allocation.schema.json", "default": { - "basin": "results/basin.arrow", - "flow": "results/flow.arrow", - "control": "results/control.arrow", - "outstate": null, - "compression": "zstd", - "compression_level": 6 + "timestep": null, + "use_allocation": false } }, "solver": { @@ -75,6 +66,17 @@ "timing": false } }, + "results": { + "$ref": "https://deltares.github.io/Ribasim/schema/Results.schema.json", + "default": { + "basin": "results/basin.arrow", + "flow": "results/flow.arrow", + "control": "results/control.arrow", + "outstate": null, + "compression": "zstd", + "compression_level": 6 + } + }, "terminal": { "$ref": "https://deltares.github.io/Ribasim/schema/terminal.schema.json", "default": { @@ -166,14 +168,14 @@ "required": [ "starttime", "endtime", - "update_timestep", "relative_dir", "input_dir", "results_dir", "database", - "results", + "allocation", "solver", "logging", + "results", "terminal", "pid_control", "level_boundary", diff --git a/docs/schema/Edge.schema.json b/docs/schema/Edge.schema.json index 7cea96291..96f434e7e 100644 --- a/docs/schema/Edge.schema.json +++ b/docs/schema/Edge.schema.json @@ -25,6 +25,17 @@ "format": "default", "type": "string" }, + "allocation_network_id": { + "format": "default", + "anyOf": [ + { + "type": "null" + }, + { + "type": "integer" + } + ] + }, "remarks": { "description": "a hack for pandera", "type": "string", diff --git a/docs/schema/Node.schema.json b/docs/schema/Node.schema.json index 18a7dc643..bf3d3d1b8 100644 --- a/docs/schema/Node.schema.json +++ b/docs/schema/Node.schema.json @@ -17,6 +17,17 @@ "format": "default", "type": "string" }, + "allocation_network_id": { + "format": "default", + "anyOf": [ + 
{ + "type": "null" + }, + { + "type": "integer" + } + ] + }, "remarks": { "description": "a hack for pandera", "type": "string", diff --git a/python/ribasim/ribasim/__init__.py b/python/ribasim/ribasim/__init__.py index 691bef5a1..fa164971e 100644 --- a/python/ribasim/ribasim/__init__.py +++ b/python/ribasim/ribasim/__init__.py @@ -2,7 +2,7 @@ from ribasim import models, utils -from ribasim.config import Config, Logging, Solver +from ribasim.config import Allocation, Config, Logging, Solver from ribasim.geometry.edge import Edge from ribasim.geometry.node import Node from ribasim.model import Model @@ -42,4 +42,5 @@ "DiscreteControl", "PidControl", "User", + "Allocation", ] diff --git a/python/ribasim/ribasim/config.py b/python/ribasim/ribasim/config.py index 9aa4edfc4..28c02343a 100644 --- a/python/ribasim/ribasim/config.py +++ b/python/ribasim/ribasim/config.py @@ -9,13 +9,9 @@ from pydantic import BaseModel, Field -class Results(BaseModel): - basin: str = "results/basin.arrow" - flow: str = "results/flow.arrow" - control: str = "results/control.arrow" - outstate: Optional[str] = None - compression: str = "zstd" - compression_level: int = 6 +class Allocation(BaseModel): + timestep: Optional[float] = None + use_allocation: bool = False class Solver(BaseModel): @@ -38,6 +34,15 @@ class Logging(BaseModel): timing: bool = False +class Results(BaseModel): + basin: str = "results/basin.arrow" + flow: str = "results/flow.arrow" + control: str = "results/control.arrow" + outstate: Optional[str] = None + compression: str = "zstd" + compression_level: int = 6 + + class Terminal(BaseModel): static: Optional[str] = None @@ -102,21 +107,13 @@ class FractionalFlow(BaseModel): class Config(BaseModel): starttime: datetime endtime: datetime - update_timestep: float = 86400 relative_dir: str = "." input_dir: str = "." results_dir: str = "." 
database: str - results: Results = Field( - default_factory=lambda: Results.parse_obj( - { - "basin": "results/basin.arrow", - "flow": "results/flow.arrow", - "control": "results/control.arrow", - "outstate": None, - "compression": "zstd", - "compression_level": 6, - } + allocation: Allocation = Field( + default_factory=lambda: Allocation.parse_obj( + {"timestep": None, "use_allocation": False} ) ) solver: Solver = Field( @@ -142,6 +139,18 @@ class Config(BaseModel): {"verbosity": {"level": 0}, "timing": False} ) ) + results: Results = Field( + default_factory=lambda: Results.parse_obj( + { + "basin": "results/basin.arrow", + "flow": "results/flow.arrow", + "control": "results/control.arrow", + "outstate": None, + "compression": "zstd", + "compression_level": 6, + } + ) + ) terminal: Terminal = Field( default_factory=lambda: Terminal.parse_obj({"static": None}) ) diff --git a/python/ribasim/ribasim/model.py b/python/ribasim/ribasim/model.py index b829d3fc1..7632cc3be 100644 --- a/python/ribasim/ribasim/model.py +++ b/python/ribasim/ribasim/model.py @@ -14,7 +14,7 @@ from pydantic import BaseModel from ribasim import geometry, node_types -from ribasim.config import Logging, Solver +from ribasim.config import Allocation, Logging, Solver from ribasim.geometry.edge import Edge from ribasim.geometry.node import Node @@ -80,6 +80,8 @@ class Model(BaseModel): Starting time of the simulation. endtime : Union[str, datetime.datetime] End time of the simulation. + allocation : Optional[Allocation] + Allocation settings. solver : Optional[Solver] Solver settings. 
logging : Optional[logging] @@ -103,6 +105,7 @@ class Model(BaseModel): user: Optional[User] starttime: datetime.datetime endtime: datetime.datetime + allocation: Optional[Allocation] solver: Optional[Solver] logging: Optional[Logging] @@ -141,6 +144,10 @@ def _write_toml(self, directory: FilePath): section = {k: v for k, v in self.solver.dict().items() if v is not None} content["solver"] = section + if self.allocation is not None: + section = {k: v for k, v in self.allocation.dict().items() if v is not None} + content["allocation"] = section + # TODO This should be rewritten as self.dict(exclude_unset=True, exclude_defaults=True) # after we make sure that we have a (sub)model that's only the config, instead # the mix of models and config it is now. diff --git a/python/ribasim/ribasim/models.py b/python/ribasim/ribasim/models.py index eba64fd86..f5a6487fb 100644 --- a/python/ribasim/ribasim/models.py +++ b/python/ribasim/ribasim/models.py @@ -65,6 +65,7 @@ class Edge(BaseModel): from_node_id: int to_node_id: int edge_type: str + allocation_network_id: Optional[int] = None remarks: str = Field("", description="a hack for pandera") @@ -126,6 +127,7 @@ class Node(BaseModel): fid: int name: str type: str + allocation_network_id: Optional[int] = None remarks: str = Field("", description="a hack for pandera") diff --git a/python/ribasim_testmodels/ribasim_testmodels/allocation.py b/python/ribasim_testmodels/ribasim_testmodels/allocation.py index a30fc2bbf..5ea605a3e 100644 --- a/python/ribasim_testmodels/ribasim_testmodels/allocation.py +++ b/python/ribasim_testmodels/ribasim_testmodels/allocation.py @@ -165,7 +165,7 @@ def subnetwork_model(): # Make sure the feature id starts at 1: explicitly give an index. 
node = ribasim.Node( static=gpd.GeoDataFrame( - data={"type": node_type}, + data={"type": node_type, "allocation_network_id": 1}, index=pd.Index(np.arange(len(xy)) + 1, name="fid"), geometry=node_xy, crs="EPSG:28992", @@ -177,6 +177,8 @@ def subnetwork_model(): [1, 2, 3, 2, 2, 5, 6, 7, 6, 8, 6, 13, 10, 11, 12], dtype=np.int64 ) to_id = np.array([2, 3, 4, 10, 5, 6, 7, 8, 11, 12, 13, 9, 2, 6, 8], dtype=np.int64) + allocation_network_id = len(from_id) * [None] + allocation_network_id[0] = 1 lines = ribasim.utils.geometry_from_connectivity(node, from_id, to_id) edge = ribasim.Edge( static=gpd.GeoDataFrame( @@ -184,6 +186,7 @@ def subnetwork_model(): "from_node_id": from_id, "to_node_id": to_id, "edge_type": len(from_id) * ["flow"], + "allocation_network_id": allocation_network_id, }, geometry=lines, crs="EPSG:28992", @@ -192,7 +195,7 @@ def subnetwork_model(): # Setup the basins: profile = pd.DataFrame( - data={"node_id": [2, 2, 6, 6, 8, 8], "area": 1000.0, "level": 3 * [0.0, 1.0]} + data={"node_id": [2, 2, 6, 6, 8, 8], "area": 100000.0, "level": 3 * [0.0, 1.0]} ) static = pd.DataFrame( @@ -206,39 +209,30 @@ def subnetwork_model(): } ) - state = pd.DataFrame(data={"node_id": [2, 6, 8], "level": 1.0}) + state = pd.DataFrame(data={"node_id": [2, 6, 8], "level": 10.0}) basin = ribasim.Basin(profile=profile, static=static, state=state) # Setup the flow boundary: flow_boundary = ribasim.FlowBoundary( - static=pd.DataFrame(data={"node_id": [1], "flow_rate": [4.5]}) + time=pd.DataFrame( + data={ + "node_id": 1, + "flow_rate": np.arange(10, 0, -1), + "time": [f"2020-{i}-1 00:00:00" for i in range(1, 11)], + } + ) ) # Setup the users: user = ribasim.User( static=pd.DataFrame( data={ - "node_id": [10, 12], - "demand": [1.0, 4.0], - "return_factor": 0.9, - "min_level": 0.9, - "priority": [2, 1], - } - ), - time=pd.DataFrame( - data={ - "node_id": 11, - "time": [ - "2020-01-01 00:00:00", - "2021-01-01 00:00:00", - "2020-01-01 00:00:00", - "2021-01-01 00:00:00", - ], - "demand": 
[2.0, 2.0, 0.0, 3.0], + "node_id": [10, 11, 12], + "demand": [4.0, 5.0, 3.0], "return_factor": 0.9, "min_level": 0.9, - "priority": [2, 2, 3, 3], + "priority": [2, 1, 2], } ), ) @@ -270,6 +264,9 @@ def subnetwork_model(): ) ) + # Setup allocation: + allocation = ribasim.Allocation(use_allocation=True, timestep=86400) + model = ribasim.Model( node=node, edge=edge, @@ -279,8 +276,9 @@ def subnetwork_model(): pump=pump, outlet=outlet, terminal=terminal, + allocation=allocation, starttime="2020-01-01 00:00:00", - endtime="2020-01-01 00:00:00", + endtime="2020-04-01 00:00:00", ) return model @@ -348,7 +346,7 @@ def looped_subnetwork_model(): # Make sure the feature id starts at 1: explicitly give an index. node = ribasim.Node( static=gpd.GeoDataFrame( - data={"type": node_type}, + data={"type": node_type, "allocation_network_id": 1}, index=pd.Index(np.arange(len(xy)) + 1, name="fid"), geometry=node_xy, crs="EPSG:28992", @@ -528,7 +526,7 @@ def looped_subnetwork_model(): tabulated_rating_curve=rating_curve, terminal=terminal, starttime="2020-01-01 00:00:00", - endtime="2020-01-01 00:00:00", + endtime="2021-01-01 00:00:00", ) return model