From 177cfc2af3aa8de852eba62409c355c05409783b Mon Sep 17 00:00:00 2001
From: Valentin Kaisermayer
Date: Sun, 27 Nov 2022 12:32:45 +0100
Subject: [PATCH 1/4] supports MOI solvers without NLPBlock

robustifies MOI term collection
fix
fix
fix
format
do the expression expansion ourselves
adds returncode handling
format
make sure that simplify returns eventually
expand quad expressions
don't call on operator
changes @asserts to meaningful exceptions
error early if NaN or Inf is in an expression
adds test
adds opt
fix test
use process_p_u0_symbolic
formatting
---
 lib/OptimizationMOI/Project.toml           |   5 +-
 lib/OptimizationMOI/src/OptimizationMOI.jl | 425 +++++-------
 lib/OptimizationMOI/src/moi.jl             | 492 +++++++++++++++++++++
 lib/OptimizationMOI/src/nlp.jl             | 407 +++++++++++++++++
 lib/OptimizationMOI/test/runtests.jl       |  83 +++-
 src/function/mtk.jl                        | 106 +++++
 6 files changed, 1200 insertions(+), 318 deletions(-)
 create mode 100644 lib/OptimizationMOI/src/moi.jl
 create mode 100644 lib/OptimizationMOI/src/nlp.jl

diff --git a/lib/OptimizationMOI/Project.toml b/lib/OptimizationMOI/Project.toml
index 151a238d3..8e3bbdc28 100644
--- a/lib/OptimizationMOI/Project.toml
+++ b/lib/OptimizationMOI/Project.toml
@@ -5,10 +5,12 @@ version = "0.1.12"
 
 [deps]
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
+ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
 Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"
+Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
 
 [compat]
 Ipopt_jll = "=300.1400.400"
@@ -21,6 +23,7 @@ julia = "1"
 [extras]
 Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
 AmplNLWriter = "7c4d4715-977e-5154-bfe0-e096adeac482"
+HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
 Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
 Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"
 ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [targets]
-test = ["Ipopt", "AmplNLWriter", "Ipopt_jll", "ModelingToolkit", "NLopt", "Juniper", "Test", "Zygote"]
+test = ["HiGHS", "Ipopt", "AmplNLWriter", "Ipopt_jll", "ModelingToolkit", "NLopt", "Juniper", "Test", "Zygote"]
diff --git a/lib/OptimizationMOI/src/OptimizationMOI.jl b/lib/OptimizationMOI/src/OptimizationMOI.jl
index 450b58003..a87d26b62 100644
--- a/lib/OptimizationMOI/src/OptimizationMOI.jl
+++ b/lib/OptimizationMOI/src/OptimizationMOI.jl
@@ -2,7 +2,13 @@ module OptimizationMOI
 
 using Reexport
 @reexport using Optimization
-using MathOptInterface, Optimization.SciMLBase, SparseArrays
+using MathOptInterface
+using Optimization.SciMLBase
+using SparseArrays
+import ModelingToolkit
+using ModelingToolkit: parameters, states, varmap_to_vars, mergedefaults
+const MTK = ModelingToolkit
+import Symbolics
 
 const MOI = MathOptInterface
 
@@ -21,239 +27,25 @@ function SciMLBase.allowscallback(opt::Union{MOI.AbstractOptimizer,
     false
 end
 
-struct MOIOptimizationProblem{T, F <: OptimizationFunction, uType, P,
-                              JT <: DenseOrSparse{T}, HT <: DenseOrSparse{T},
-                              CHT <: DenseOrSparse{T}} <:
-       MOI.AbstractNLPEvaluator
-    f::F
-    u0::uType
-    p::P
-    J::JT
-    H::HT
-    cons_H::Vector{CHT}
-    lcons::Vector{T}
-    ucons::Vector{T}
-end
-
-function MOIOptimizationProblem(prob::OptimizationProblem)
-    num_cons = prob.ucons === nothing ?
0 : length(prob.ucons) - f = Optimization.instantiate_function(prob.f, prob.u0, prob.f.adtype, prob.p, num_cons) - T = eltype(prob.u0) - n = length(prob.u0) - return MOIOptimizationProblem(f, - prob.u0, - prob.p, - isnothing(f.cons_jac_prototype) ? zeros(T, num_cons, n) : - convert.(T, f.cons_jac_prototype), - isnothing(f.hess_prototype) ? zeros(T, n, n) : - convert.(T, f.hess_prototype), - isnothing(f.cons_hess_prototype) ? - Matrix{T}[zeros(T, n, n) for i in 1:num_cons] : - [convert.(T, f.cons_hess_prototype[i]) - for i in 1:num_cons], - prob.lcons === nothing ? fill(-Inf, num_cons) : - prob.lcons, - prob.ucons === nothing ? fill(Inf, num_cons) : prob.ucons) -end - -function MOI.features_available(prob::MOIOptimizationProblem) - features = [:Grad, :Hess, :Jac] - # Assume that if there are constraints and expr then cons_expr exists - if prob.f.expr !== nothing - push!(features, :ExprGraph) - end - return features -end - -function MOI.initialize(moiproblem::MOIOptimizationProblem, - requested_features::Vector{Symbol}) - available_features = MOI.features_available(moiproblem) - for feat in requested_features - if !(feat in available_features) - error("Unsupported feature $feat") - # TODO: implement Jac-vec and Hess-vec products - # for solvers that need them - end - end - return -end - -function MOI.eval_objective(moiproblem::MOIOptimizationProblem, x) - return moiproblem.f(x, moiproblem.p) -end - -function MOI.eval_constraint(moiproblem::MOIOptimizationProblem, g, x) - moiproblem.f.cons(g, x) - return -end - -function MOI.eval_objective_gradient(moiproblem::MOIOptimizationProblem, G, x) - moiproblem.f.grad(G, x) - return -end - -# This structure assumes the calculation of moiproblem.J is dense. -function MOI.jacobian_structure(moiproblem::MOIOptimizationProblem) - if moiproblem.J isa SparseMatrixCSC - rows, cols, _ = findnz(moiproblem.J) - inds = Tuple{Int, Int}[(i, j) for (i, j) in zip(rows, cols)] - else - rows, cols = size(moiproblem.J) - inds = Tuple{Int, Int}[(i, j) for j in 1:cols for i in 1:rows] - end - return inds -end - -function MOI.eval_constraint_jacobian(moiproblem::MOIOptimizationProblem, j, x) - if isempty(j) - return - elseif moiproblem.f.cons_j === nothing - error("Use OptimizationFunction to pass the derivatives or " * - "automatically generate them with one of the autodiff backends") - end - moiproblem.f.cons_j(moiproblem.J, x) - if moiproblem.J isa SparseMatrixCSC - nnz = nonzeros(moiproblem.J) - @assert length(j) == length(nnz) - for (i, Ji) in zip(eachindex(j), nnz) - j[i] = Ji - end - else - for i in eachindex(j) - j[i] = moiproblem.J[i] - end - end - return -end - -function MOI.hessian_lagrangian_structure(moiproblem::MOIOptimizationProblem) - sparse_obj = moiproblem.H isa SparseMatrixCSC - sparse_constraints = all(H -> H isa SparseMatrixCSC, moiproblem.cons_H) - if !sparse_constraints && any(H -> H isa SparseMatrixCSC, moiproblem.cons_H) - # Some constraint hessians are dense and some are sparse! :( - error("Mix of sparse and dense constraint hessians are not supported") - end - N = length(moiproblem.u0) - inds = if sparse_obj - rows, cols, _ = findnz(moiproblem.H) - Tuple{Int, Int}[(i, j) for (i, j) in zip(rows, cols) if i <= j] - else - Tuple{Int, Int}[(row, col) for col in 1:N for row in 1:col] - end - if sparse_constraints - for Hi in moiproblem.cons_H - r, c, _ = findnz(Hi) - for (i, j) in zip(r, c) - if i <= j - push!(inds, (i, j)) - end - end - end - elseif !sparse_obj - # Performance optimization. 
If both are dense, no need to repeat - else - for col in 1:N, row in 1:col - push!(inds, (row, col)) - end - end - return inds -end - -function MOI.eval_hessian_lagrangian(moiproblem::MOIOptimizationProblem{T}, - h, - x, - σ, - μ) where {T} - fill!(h, zero(T)) - k = 0 - moiproblem.f.hess(moiproblem.H, x) - sparse_objective = moiproblem.H isa SparseMatrixCSC - if sparse_objective - rows, cols, _ = findnz(moiproblem.H) - for (i, j) in zip(rows, cols) - if i <= j - k += 1 - h[k] = σ * moiproblem.H[i, j] - end - end - else - for i in 1:size(moiproblem.H, 1), j in 1:i - k += 1 - h[k] = σ * moiproblem.H[i, j] - end - end - # A count of the number of non-zeros in the objective Hessian is needed if - # the constraints are dense. - nnz_objective = k - if !isempty(μ) && !all(iszero, μ) - moiproblem.f.cons_h(moiproblem.cons_H, x) - for (μi, Hi) in zip(μ, moiproblem.cons_H) - if Hi isa SparseMatrixCSC - rows, cols, _ = findnz(Hi) - for (i, j) in zip(rows, cols) - if i <= j - k += 1 - h[k] += μi * Hi[i, j] - end - end - else - # The constraints are dense. We only store one copy of the - # Hessian, so reset `k` to where it starts. That will be - # `nnz_objective` if the objective is sprase, and `0` otherwise. - k = sparse_objective ? nnz_objective : 0 - for i in 1:size(Hi, 1), j in 1:i - k += 1 - h[k] += μi * Hi[i, j] - end - end - end - end - return -end - -_replace_variable_indices(expr) = expr - -function _replace_variable_indices(expr::Expr) - if expr.head == :ref - @assert length(expr.args) == 2 - @assert expr.args[1] == :x - return Expr(:ref, :x, MOI.VariableIndex(expr.args[2])) - end - for i in 1:length(expr.args) - expr.args[i] = _replace_variable_indices(expr.args[i]) - end - return expr -end - -function MOI.objective_expr(prob::MOIOptimizationProblem) - return _replace_variable_indices(prob.f.expr) -end - -function MOI.constraint_expr(prob::MOIOptimizationProblem, i) - # expr has the form f(x) == 0 - expr = _replace_variable_indices(prob.f.cons_expr[i].args[2]) - lb, ub = prob.lcons[i], prob.ucons[i] - return :($lb <= $expr <= $ub) -end - function _create_new_optimizer(opt::MOI.OptimizerWithAttributes) - return _create_new_optimizer(MOI.instantiate(opt)) + return _create_new_optimizer(MOI.instantiate(opt, with_bridge_type = Float64)) end -function _create_new_optimizer(model::MOI.AbstractOptimizer) - if !MOI.is_empty(model) - MOI.empty!(model) +function _create_new_optimizer(opt::MOI.AbstractOptimizer) + if !MOI.is_empty(opt) + MOI.empty!(opt) # important! 
ensure that the optimizer is empty end - if MOI.supports_incremental_interface(model) - return model + if MOI.supports_incremental_interface(opt) + return opt end - return MOI.Utilities.CachingOptimizer(MOI.Utilities.UniversalFallback(MOI.Utilities.Model{ - Float64 - }()), - model) + opt_setup = MOI.Utilities.CachingOptimizer(MOI.Utilities.UniversalFallback(MOI.Utilities.Model{ + Float64 + }()), + opt) + return opt_setup end -function __map_optimizer_args(prob::OptimizationProblem, +function __map_optimizer_args(cache, opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes }; maxiters::Union{Number, Nothing} = nothing, @@ -324,97 +116,116 @@ function __moi_status_to_ReturnCode(status::MOI.TerminationStatusCode) end end -function _create_integer_domain(id::Int, lb::Nothing, ub::Nothing) - MOI.Integer() -end - -function _create_integer_domain(id::Int, lb::Vector, ub::Vector) - (iszero(lb[id]) && isone(ub[id])) && return MOI.ZeroOne() - return _create_integer_domain(id, nothing, nothing) +""" +Replaces every expression `:x[i]` with `:x[MOI.VariableIndex(i)]` +""" +_replace_variable_indices!(expr) = expr +function _replace_variable_indices!(expr::Expr) + if expr.head == :ref && expr.args[1] == :x + return Expr(:ref, :x, MOI.VariableIndex(expr.args[2])) + end + for i in 1:length(expr.args) + expr.args[i] = _replace_variable_indices!(expr.args[i]) + end + return expr end -function SciMLBase.__solve(prob::OptimizationProblem, - opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes}; - maxiters::Union{Number, Nothing} = nothing, - maxtime::Union{Number, Nothing} = nothing, - abstol::Union{Number, Nothing} = nothing, - reltol::Union{Number, Nothing} = nothing, - kwargs...) - maxiters = Optimization._check_and_convert_maxiters(maxiters) - maxtime = Optimization._check_and_convert_maxtime(maxtime) - opt_setup = __map_optimizer_args(prob, - opt; - abstol = abstol, - reltol = reltol, - maxiters = maxiters, - maxtime = maxtime, - kwargs...) 
- num_variables = length(prob.u0) - θ = MOI.add_variables(opt_setup, num_variables) - if prob.lb !== nothing - @assert eachindex(prob.lb) == Base.OneTo(num_variables) - for i in 1:num_variables - if prob.lb[i] > -Inf - MOI.add_constraint(opt_setup, θ[i], MOI.GreaterThan(prob.lb[i])) - end - end +""" +Replaces every expression `:p[i]` with its numeric value from `p` +""" +_replace_parameter_indices!(expr, p) = expr +function _replace_parameter_indices!(expr::Expr, p) + if expr.head == :ref && expr.args[1] == :p + p_ = p[expr.args[2]] + (!isa(p_, Real) || isnan(p_) || isinf(p_)) && + throw(ArgumentError("Expected parameters to be real valued: $(expr.args[2]) => $p_")) + return p_ end - if prob.ub !== nothing - @assert eachindex(prob.ub) == Base.OneTo(num_variables) - for i in 1:num_variables - if prob.ub[i] < Inf - MOI.add_constraint(opt_setup, θ[i], MOI.LessThan(prob.ub[i])) - end - end + for i in 1:length(expr.args) + expr.args[i] = _replace_parameter_indices!(expr.args[i], p) end + return expr +end - if prob.int !== nothing - @assert eachindex(prob.int) == Base.OneTo(num_variables) - for i in 1:num_variables - if prob.int[i] - MOI.add_constraint(opt_setup, θ[i], - _create_integer_domain(i, prob.lb, prob.ub)) - end - end +""" +Replaces calls like `:(getindex, 1, :x)` with `:(x[1])` +""" +repl_getindex!(expr::T) where {T} = expr +function repl_getindex!(expr::Expr) + if expr.head == :call && expr.args[1] == :getindex + return Expr(:ref, expr.args[2], expr.args[3]) end - - if MOI.supports(opt_setup, MOI.VariablePrimalStart(), MOI.VariableIndex) - @assert eachindex(prob.u0) == Base.OneTo(num_variables) - for i in 1:num_variables - MOI.set(opt_setup, MOI.VariablePrimalStart(), θ[i], prob.u0[i]) - end + for i in 1:length(expr.args) + expr.args[i] = repl_getindex!(expr.args[i]) end - MOI.set(opt_setup, - MOI.ObjectiveSense(), - prob.sense === Optimization.MaxSense ? 
MOI.MAX_SENSE : MOI.MIN_SENSE) - if prob.lcons === nothing - @assert prob.ucons === nothing - con_bounds = MOI.NLPBoundsPair[] + return expr +end + +function __moi_status_to_ReturnCode(status::MOI.TerminationStatusCode) + if status in [ + MOI.OPTIMAL, + MOI.LOCALLY_SOLVED, + MOI.ALMOST_OPTIMAL, + MOI.ALMOST_LOCALLY_SOLVED, + ] + return ReturnCode.Success + elseif status in [ + MOI.INFEASIBLE, + MOI.DUAL_INFEASIBLE, + MOI.LOCALLY_INFEASIBLE, + MOI.INFEASIBLE_OR_UNBOUNDED, + MOI.ALMOST_INFEASIBLE, + MOI.ALMOST_DUAL_INFEASIBLE, + ] + return ReturnCode.Infeasible + elseif status in [ + MOI.ITERATION_LIMIT, + MOI.NODE_LIMIT, + MOI.SLOW_PROGRESS, + ] + return ReturnCode.MaxIters + elseif status == MOI.TIME_LIMIT + return ReturnCode.MaxTime + elseif status in [ + MOI.OPTIMIZE_NOT_CALLED, + MOI.NUMERICAL_ERROR, + MOI.INVALID_MODEL, + MOI.INVALID_OPTION, + MOI.INTERRUPTED, + MOI.OTHER_ERROR, + MOI.SOLUTION_LIMIT, + MOI.MEMORY_LIMIT, + MOI.OBJECTIVE_LIMIT, + MOI.NORM_LIMIT, + MOI.OTHER_LIMIT, + ] + return ReturnCode.Failure else - @assert prob.ucons !== nothing - con_bounds = MOI.NLPBoundsPair.(prob.lcons, prob.ucons) + return ReturnCode.Default end - MOI.set(opt_setup, - MOI.NLPBlock(), - MOI.NLPBlockData(con_bounds, MOIOptimizationProblem(prob), true)) - t0 = time() - MOI.optimize!(opt_setup) - t1 = time() - if MOI.get(opt_setup, MOI.ResultCount()) >= 1 - minimizer = MOI.get(opt_setup, MOI.VariablePrimal(), θ) - objective = MOI.get(opt_setup, MOI.ObjectiveValue()) - opt_ret = __moi_status_to_ReturnCode(MOI.get(opt_setup, MOI.TerminationStatus())) +end + +include("nlp.jl") +include("moi.jl") + +function SciMLBase.supports_opt_cache_interface(alg::Union{MOI.AbstractOptimizer, + MOI.OptimizerWithAttributes}) + true +end + +function SciMLBase.__init(prob::OptimizationProblem, + opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes}; + maxiters::Union{Number, Nothing} = nothing, + maxtime::Union{Number, Nothing} = nothing, + abstol::Union{Number, Nothing} = nothing, + reltol::Union{Number, Nothing} = nothing, + kwargs...) + cache = if MOI.supports(_create_new_optimizer(opt), MOI.NLPBlock()) + MOIOptimizationNLPCache(prob, opt; maxiters, maxtime, abstol, reltol, kwargs...) else - minimizer = fill(NaN, num_variables) - objective = NaN - opt_ret = ReturnCode.Default + MOIOptimizationCache(prob, opt; maxiters, maxtime, abstol, reltol, kwargs...) end - return SciMLBase.build_solution(SciMLBase.DefaultOptimizationCache(prob.f, prob.p), - opt, - minimizer, - objective; - original = opt_setup, - retcode = opt_ret, solve_time = t1 - t0) + return cache end end diff --git a/lib/OptimizationMOI/src/moi.jl b/lib/OptimizationMOI/src/moi.jl new file mode 100644 index 000000000..a0bc79129 --- /dev/null +++ b/lib/OptimizationMOI/src/moi.jl @@ -0,0 +1,492 @@ +mutable struct MOIOptimizationCache{F <: OptimizationFunction, uType, P, LB, UB, I, S, EX, + CEX, O} <: SciMLBase.AbstractOptimizationCache + f::F + u0::uType + p::P + lb::LB + ub::UB + int::I + sense::S + expr::EX + cons_expr::CEX + opt::O + solver_args::NamedTuple +end + +function MOIOptimizationCache(prob::OptimizationProblem, opt; kwargs...) + isnothing(prob.f.sys) && + throw(ArgumentError("Expected an `OptimizationProblem` that was setup via an `OptimizationSystem`, consider `modelingtoolkitize(prob).`")) + + # TODO: check if the problem is at most bilinear, i.e. 
affine and or quadratic terms in two variables + expr_map = get_expr_map(prob.f.sys) + expr = repl_getindex!(convert_to_expr(MTK.subs_constants(MTK.objective(prob.f.sys)), + prob.f.sys; expand_expr = false, expr_map)) + + cons = MTK.constraints(prob.f.sys) + cons_expr = Vector{Expr}(undef, length(cons)) + Threads.@sync for i in eachindex(cons) + Threads.@spawn cons_expr[i] = repl_getindex!(convert_to_expr(Symbolics.canonical_form(MTK.subs_constants(cons[i])), + prob.f.sys; + expand_expr = false, + expr_map)) + end + + return MOIOptimizationCache(prob.f, + prob.u0, + prob.p, + prob.lb, + prob.ub, + prob.int, + prob.sense, + expr, + cons_expr, + opt, + NamedTuple(kwargs)) +end + +SciMLBase.has_reinit(cache::MOIOptimizationCache) = true +function SciMLBase.reinit!(cache::MOIOptimizationCache; p = missing, u0 = missing) + if p === missing && u0 === missing + p, u0 = cache.p, cache.u0 + else # at least one of them has a value + if p === missing + p = cache.p + end + if u0 === missing + u0 = cache.u0 + end + if (eltype(p) <: Pair && !isempty(p)) || (eltype(u0) <: Pair && !isempty(u0)) # one is a non-empty symbolic map + hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :ps) || + throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." * + " Please use `remake` with the `p` keyword argument as a vector of values, paying attention to parameter order.")) + hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :states) || + throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." * + " Please use `remake` with the `u0` keyword argument as a vector of values, paying attention to state order.")) + p, u0 = SciMLBase.process_p_u0_symbolic(cache, p, u0) + end + end + + cache.p = p + cache.u0 = u0 + + return cache +end + +struct MalformedExprException <: Exception + msg::String +end +function Base.showerror(io::IO, e::MalformedExprException) + print(io, "MalformedExprException: ", e.msg) +end + +function _add_moi_variables!(opt_setup, cache::MOIOptimizationCache) + num_variables = length(cache.u0) + θ = MOI.add_variables(opt_setup, num_variables) + if cache.lb !== nothing + eachindex(cache.lb) == Base.OneTo(num_variables) || + throw(ArgumentError("Expected `cache.lb` to be of the same length as the number of variables.")) + end + if cache.ub !== nothing + eachindex(cache.ub) == Base.OneTo(num_variables) || + throw(ArgumentError("Expected `cache.ub` to be of the same length as the number of variables.")) + end + + for i in 1:num_variables + if cache.lb !== nothing && cache.lb[i] > -Inf + MOI.add_constraint(opt_setup, θ[i], MOI.GreaterThan(Float64(cache.lb[i]))) + end + if cache.ub !== nothing && cache.ub[i] < Inf + MOI.add_constraint(opt_setup, θ[i], MOI.LessThan(Float64(cache.ub[i]))) + end + if cache.int !== nothing && cache.int[i] + if cache.lb !== nothing && cache.lb[i] == 0 && cache.ub !== nothing && + cache.ub[i] == 1 + MOI.add_constraint(opt_setup, θ[i], MOI.ZeroOne()) + else + MOI.add_constraint(opt_setup, θ[i], MOI.Integer()) + end + end + end + + if MOI.supports(opt_setup, MOI.VariablePrimalStart(), MOI.VariableIndex) + eachindex(cache.u0) == Base.OneTo(num_variables) || + throw(ArgumentError("Expected `cache.u0` to be of the same length as the number of variables.")) + for i in 1:num_variables + MOI.set(opt_setup, MOI.VariablePrimalStart(), θ[i], Float64(cache.u0[i])) + end + end + return θ +end + +function SciMLBase.__solve(cache::MOIOptimizationCache) + 
maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
+    maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime)
+    opt_setup = __map_optimizer_args(cache,
+                                     cache.opt;
+                                     abstol = cache.solver_args.abstol,
+                                     reltol = cache.solver_args.reltol,
+                                     maxiters = maxiters,
+                                     maxtime = maxtime,
+                                     cache.solver_args...)
+
+    Theta = _add_moi_variables!(opt_setup, cache)
+    MOI.set(opt_setup,
+            MOI.ObjectiveSense(),
+            cache.sense === Optimization.MaxSense ? MOI.MAX_SENSE : MOI.MIN_SENSE)
+
+    if !isnothing(cache.cons_expr)
+        for cons_expr in cache.cons_expr
+            expr = _replace_parameter_indices!(deepcopy(cons_expr.args[2]), # f(x) == 0 or f(x) <= 0
+                                               cache.p)
+            expr = fixpoint_simplify_and_expand!(expr)
+            func, c = try
+                get_moi_function(expr) # find: f(x) + c == 0 or f(x) + c <= 0
+            catch e
+                if e isa MalformedExprException
+                    rethrow(MalformedExprException("$expr"))
+                else
+                    rethrow(e)
+                end
+            end
+            if is_eq(cons_expr)
+                MOI.add_constraint(opt_setup, func, MOI.EqualTo(Float64(-c)))
+            elseif is_leq(cons_expr)
+                MOI.add_constraint(opt_setup, func, MOI.LessThan(Float64(-c)))
+            else
+                throw(MalformedExprException("$expr"))
+            end
+        end
+    end
+
+    # objective
+    expr = _replace_parameter_indices!(deepcopy(cache.expr), cache.p)
+    expr = fixpoint_simplify_and_expand!(expr)
+    func, c = try
+        get_moi_function(expr)
+    catch e
+        if e isa MalformedExprException
+            rethrow(MalformedExprException("$expr"))
+        else
+            rethrow(e)
+        end
+    end
+    MOI.set(opt_setup, MOI.ObjectiveFunction{typeof(func)}(), func)
+
+    MOI.optimize!(opt_setup)
+    if MOI.get(opt_setup, MOI.ResultCount()) >= 1
+        minimizer = MOI.get(opt_setup, MOI.VariablePrimal(), Theta)
+        minimum = MOI.get(opt_setup, MOI.ObjectiveValue())
+        opt_ret = __moi_status_to_ReturnCode(MOI.get(opt_setup, MOI.TerminationStatus()))
+    else
+        minimizer = fill(NaN, length(Theta))
+        minimum = NaN
+        opt_ret = SciMLBase.ReturnCode.Default
+    end
+    return SciMLBase.build_solution(cache,
+                                    cache.opt,
+                                    minimizer,
+                                    minimum;
+                                    original = opt_setup,
+                                    retcode = opt_ret)
+end
+
+function get_moi_function(expr)
+    affine_terms = MOI.ScalarAffineTerm{Float64}[]
+    quadratic_terms = MOI.ScalarQuadraticTerm{Float64}[]
+    constant = Ref(0.0)
+    collect_moi_terms!(expr,
+                       affine_terms,
+                       quadratic_terms,
+                       constant)
+    func = if isempty(quadratic_terms)
+        MOI.ScalarAffineFunction(affine_terms, 0.0)
+    else
+        MOI.ScalarQuadraticFunction(quadratic_terms, affine_terms, 0.0)
+    end
+    return func, constant[]
+end
+
+simplify_and_expand!(expr::T) where {T} = expr
+simplify_and_expand!(expr::Rational) = Float64(expr)
+
+"""
+Simplifies and expands the given expression. All computations on numbers are evaluated and simplified.
+After successive application the resulting expression should only contain terms of the form `:(a * x[i])` or `:(a * x[i] * x[j])`.
+Note that the given expression is also mutated in-place, but the in-place result may be incomplete; only ever use the expression returned by this function!
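+
+For example (editor's illustration on a hypothetical input): repeated application
+should turn `:(3 * ((x[1] + 2) * x[2]))` into a sum of normal-form terms, roughly
+`:((3 * (x[2] * x[1])) + (6 * x[2]))`, which `collect_moi_terms!` below can then map
+onto `MOI.ScalarQuadraticTerm`s and `MOI.ScalarAffineTerm`s.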
+""" +function simplify_and_expand!(expr::Expr) # looks awful but this is actually much faster than `Metatheory.jl` + if expr.head == :call && length(expr.args) == 3 + if expr.args[1] == :(*) && expr.args[2] isa Number && expr.args[3] isa Number # a::Number * b::Number => a * b + return expr.args[2] * expr.args[3] + elseif expr.args[1] == :(+) && expr.args[2] isa Number && expr.args[3] isa Number # a::Number + b::Number => a + b + return expr.args[2] + expr.args[3] + elseif expr.args[1] == :(^) && expr.args[2] isa Number && expr.args[3] isa Number # a::Number^b::Number => a^b + return expr.args[2]^expr.args[3] + elseif expr.args[1] == :(/) && expr.args[2] isa Number && expr.args[3] isa Number # a::Number/b::Number => a/b + return expr.args[2] / expr.args[3] + elseif expr.args[1] == :(//) && expr.args[2] isa Number && expr.args[3] isa Number # a::Number//b::Number => a/b + return expr.args[2] / expr.args[3] + elseif expr.args[1] == :(*) && isa(expr.args[2], Real) && isone(expr.args[2]) # 1 * x => x + return expr.args[3] + elseif expr.args[1] == :(*) && isa(expr.args[3], Real) && isone(expr.args[3]) # x * 1 => x + return expr.args[2] + elseif expr.args[1] == :(*) && isa(expr.args[2], Real) && iszero(expr.args[2]) # 0 * x => 0 + return 0 + elseif expr.args[1] == :(*) && isa(expr.args[3], Real) && iszero(expr.args[3]) # x * 0 => x + return 0 + elseif expr.args[1] == :(+) && isa(expr.args[2], Real) && iszero(expr.args[2]) # 0 + x => x + return expr.args[3] + elseif expr.args[1] == :(+) && isa(expr.args[3], Real) && iszero(expr.args[3]) # x + 0 => x + return expr.args[2] + elseif expr.args[1] == :(/) && isa(expr.args[3], Real) && isone(expr.args[3]) # x / 1 => x + return expr.args[2] + elseif expr.args[1] == :// && isa(expr.args[3], Real) && isone(expr.args[3]) # x // 1 => x + return expr.args[2] + elseif expr.args[1] == :(^) && isa(expr.args[3], Int) && expr.args[3] == 2 # x^2 => x * x + if isa(expr.args[2], Expr) && expr.args[2].head == :call && + expr.args[2].args[1] == :+ # (x + y)^2 => (x^2 + ((2 * (x * y)) + y^2)) + return Expr(:call, :+, + Expr(:call, :^, expr.args[2].args[2], 2), + Expr(:call, :+, + Expr(:call, :*, 2, + Expr(:call, :*, expr.args[2].args[2], + expr.args[2].args[3])), + Expr(:call, :^, expr.args[2].args[3], 2))) + else + return Expr(:call, :*, expr.args[2], expr.args[2]) # x^2 => x * x + end + elseif expr.args[1] == :(^) && isa(expr.args[3], Int) && expr.args[3] > 2 # x^n => x * x^(n-1) + return Expr(:call, :*, Expr(:call, :^, expr.args[2], expr.args[3] - 1), + expr.args[2]) + elseif expr.args[1] == :(*) && isa(expr.args[3], Number) # x * a::Number => a * x + return Expr(:call, :*, expr.args[3], expr.args[2]) + elseif expr.args[1] == :(+) && isa(expr.args[3], Number) # x + a::Number => a + x + return Expr(:call, :+, expr.args[3], expr.args[2]) + elseif expr.args[1] == :(*) && isa(expr.args[3], Expr) && + expr.args[3].head == :call && expr.args[3].args[1] == :(+) # (x * (y + z)) => ((x * y) + (x * z)) + return Expr(:call, :+, + Expr(:call, :*, expr.args[2], expr.args[3].args[2]), + Expr(:call, :*, expr.args[2], expr.args[3].args[3])) + elseif expr.args[1] == :(*) && isa(expr.args[2], Expr) && + expr.args[2].head == :call && expr.args[2].args[1] == :(+) # ((y + z) * x) => ((x * y) + (x * z)) + return Expr(:call, :+, + Expr(:call, :*, expr.args[3], expr.args[2].args[2]), + Expr(:call, :*, expr.args[3], expr.args[2].args[3])) + elseif expr.args[1] == :(*) && expr.args[2] isa Number && isa(expr.args[3], Expr) && + expr.args[3].head == :call && expr.args[3].args[1] == :(*) && + 
+        elseif expr.args[1] == :(*) && expr.args[2] isa Number && isa(expr.args[3], Expr) &&
+               expr.args[3].head == :call && expr.args[3].args[1] == :(*) &&
+               expr.args[3].args[2] isa Number # a::Number * (b::Number * c) => (a * b) * c
+            return Expr(:call, :*, expr.args[2] * expr.args[3].args[2],
+                        expr.args[3].args[3])
+        elseif expr.args[1] == :(+) && isa(expr.args[3], Expr) &&
+               isa(expr.args[2], Number) &&
+               expr.args[3].head == :call && expr.args[3].args[1] == :(+) &&
+               isa(expr.args[3].args[2], Number) # a::Number + (b::Number + x) => (a+b) + x
+            return Expr(:call, :+, expr.args[2] + expr.args[3].args[2],
+                        expr.args[3].args[3])
+        elseif expr.args[1] == :(*) && isa(expr.args[3], Expr) &&
+               expr.args[3].head == :call && expr.args[3].args[1] == :(*) &&
+               isa(expr.args[3].args[2], Number) # x * (a::Number * y) => a * (x * y)
+            return Expr(:call, :*, expr.args[3].args[2],
+                        Expr(:call, :*, expr.args[2], expr.args[3].args[3]))
+        elseif expr.args[1] == :(*) && isa(expr.args[2], Expr) &&
+               expr.args[2].head == :call && expr.args[2].args[1] == :(*) &&
+               isa(expr.args[2].args[2], Number) # (a::Number * x) * y => a * (x * y)
+            return Expr(:call, :*, expr.args[2].args[2],
+                        Expr(:call, :*, expr.args[2].args[3], expr.args[3]))
+        end
+    elseif expr.head == :call && all(isa.(expr.args[2:end], Number)) # func(a::Number...)
+        return eval(expr)
+    end
+    for i in 1:length(expr.args)
+        expr.args[i] = simplify_and_expand!(expr.args[i])
+    end
+    return expr
+end
+
+"""
+Simplifies the given expression until a fixed point is reached and the expression no longer changes.
+Will not terminate if a cycle occurs!
+"""
+function fixpoint_simplify_and_expand!(expr; iter_max = typemax(Int) - 1)
+    i = 0
+    iter_max >= 0 || throw(ArgumentError("Expected `iter_max` to be non-negative."))
+    while i <= iter_max
+        expr_old = deepcopy(expr)
+        expr = simplify_and_expand!(expr)
+        expr_old == expr && break # might not return if a cycle is reached
+        i += 1
+    end
+    return expr
+end
+
+function collect_moi_terms!(expr::Real, affine_terms, quadratic_terms, constant)
+    (isnan(expr) || isinf(expr)) && throw(MalformedExprException("$expr"))
+    constant[] += expr
+end
+
+function collect_moi_terms!(expr::Expr, affine_terms, quadratic_terms, constant)
+    if expr.head == :call
+        length(expr.args) == 3 || throw(MalformedExprException("$expr"))
+        if expr.args[1] == :(+)
+            for i in 2:length(expr.args)
+                collect_moi_terms!(expr.args[i], affine_terms, quadratic_terms, constant)
+            end
+        elseif expr.args[1] == :(*)
+            if isa(expr.args[2], Number) && isa(expr.args[3], Expr)
+                if expr.args[3].head == :call && expr.args[3].args[1] == :(*) # a::Number * (x[i] * x[j])
+                    x1 = _get_variable_index_from_expr(expr.args[3].args[2])
+                    x2 = _get_variable_index_from_expr(expr.args[3].args[3])
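+                    # Editor's note: MOI's `ScalarQuadraticFunction` is defined as
+                    # 0.5 * x' * Q * x + a' * x + b, so the coefficient of a squared
+                    # term x[i] * x[i] has to be doubled; that is what `factor` does.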
+                    factor = x1 == x2 ? 2.0 : 1.0
+                    c = factor * Float64(expr.args[2])
+                    (isnan(c) || isinf(c)) && throw(MalformedExprException("$expr"))
+                    push!(quadratic_terms, MOI.ScalarQuadraticTerm(c, x1, x2))
+                elseif expr.args[3].head == :ref # a::Number * x[i]
+                    x = _get_variable_index_from_expr(expr.args[3])
+                    c = Float64(expr.args[2])
+                    (isnan(c) || isinf(c)) && throw(MalformedExprException("$expr"))
+                    push!(affine_terms, MOI.ScalarAffineTerm(c, x))
+                else
+                    throw(MalformedExprException("$expr"))
+                end
+            elseif isa(expr.args[2], Number) && isa(expr.args[3], Number) # a::Number * b::Number
+                c = expr.args[2] * expr.args[3]
+                (isnan(c) || isinf(c)) && throw(MalformedExprException("$expr"))
+                constant[] += c
+            elseif isa(expr.args[2], Expr) && isa(expr.args[3], Expr)
+                if expr.args[2].head == :call && expr.args[2].args[1] == :(*) &&
+                   isa(expr.args[2].args[2], Number) # (a::Number * x[i]) * x[j]
+                    x1 = _get_variable_index_from_expr(expr.args[2].args[3])
+                    x2 = _get_variable_index_from_expr(expr.args[3])
+                    factor = x1 == x2 ? 2.0 : 1.0
+                    c = factor * Float64(expr.args[2].args[2])
+                    (isnan(c) || isinf(c)) && throw(MalformedExprException("$expr"))
+                    push!(quadratic_terms, MOI.ScalarQuadraticTerm(c, x1, x2))
+                else # x[i] * x[j]
+                    x1 = _get_variable_index_from_expr(expr.args[2])
+                    x2 = _get_variable_index_from_expr(expr.args[3])
+                    factor = x1 == x2 ? 2.0 : 1.0
+                    push!(quadratic_terms,
+                          MOI.ScalarQuadraticTerm(factor,
+                                                  x1, x2))
+                end
+            else
+                throw(MalformedExprException("$expr"))
+            end
+        end
+    elseif expr.head == :ref # x[i]
+        expr.args[1] == :x || throw(MalformedExprException("$expr"))
+        push!(affine_terms, MOI.ScalarAffineTerm(1.0, MOI.VariableIndex(expr.args[2])))
+    else
+        throw(MalformedExprException("$expr"))
+    end
+
+    return
+end
+
+_get_variable_index_from_expr(expr::T) where {T} = throw(MalformedExprException("$expr"))
+function _get_variable_index_from_expr(expr::Expr)
+    _is_var_ref_expr(expr)
+    return MOI.VariableIndex(expr.args[2])
+end
+
+function _is_var_ref_expr(expr::Expr)
+    expr.head == :ref || throw(MalformedExprException("$expr")) # x[i]
+    expr.args[1] == :x || throw(MalformedExprException("$expr"))
+    return true
+end
+
+function is_eq(expr::Expr)
+    expr.head == :call || throw(MalformedExprException("$expr"))
+    expr.args[1] in [:(==), :(=)]
+end
+
+function is_leq(expr::Expr)
+    expr.head == :call || throw(MalformedExprException("$expr"))
+    expr.args[1] == :(<=)
+end
+
+##############################################################################################
+## TODO: remove if in ModelingToolkit
+"""
+    rep_pars_vals!(expr::T, expr_map)
+
+Replaces variable expressions of the form `:some_variable` or `:(getindex, :some_variable, j)` with
+`x[i]` where `i` is the corresponding index in the state vector. Same for the parameters. The
+variable/parameter pairs are provided via the `expr_map`.
+
+Expects only expressions where the variables and parameters are of the form `:some_variable`,
+`:(getindex, :some_variable, j)`, or `:(some_variable[j])`.
+""" +rep_pars_vals!(expr::T, expr_map) where {T} = expr +function rep_pars_vals!(expr::Symbol, expr_map) + for (f, n) in expr_map + isequal(f, expr) && return n + end + return expr +end +function rep_pars_vals!(expr::Expr, expr_map) + if (expr.head == :call && expr.args[1] == getindex) || (expr.head == :ref) + for (f, n) in expr_map + isequal(f, expr) && return n + end + end + Threads.@sync for i in eachindex(expr.args) + i == 1 && expr.head == :call && continue # first arg is the operator + Threads.@spawn expr.args[i] = rep_pars_vals!(expr.args[i], expr_map) + end + return expr +end + +""" + symbolify!(e) + +Ensures that a given expression is fully symbolic, e.g. no function calls. +""" +symbolify!(e) = e +function symbolify!(e::Expr) + if !(e.args[1] isa Symbol) + e.args[1] = Symbol(e.args[1]) + end + symbolify!.(e.args) + return e +end + +""" + get_expr_map(sys) + +Make a map from every parameter and state of the given system to an expression indexing its position +in the state or parameter vector. +""" +function get_expr_map(sys) + dvs = ModelingToolkit.states(sys) + ps = ModelingToolkit.parameters(sys) + return vcat([ModelingToolkit.toexpr(_s) => Expr(:ref, :x, i) + for (i, _s) in enumerate(dvs)], + [ModelingToolkit.toexpr(_p) => Expr(:ref, :p, i) + for (i, _p) in enumerate(ps)]) +end + +""" + convert_to_expr(eq, sys; expand_expr = false, pairs_arr = expr_map(sys)) + +Converts the given symbolic expression to a Julia `Expr` and replaces all symbols, i.e. states and +parameters with `x[i]` and `p[i]`. + +# Arguments: +- `eq`: Expression to convert +- `sys`: Reference to the system holding the parameters and states +- `expand_expr=false`: If `true` the symbolic expression is expanded first. +""" +function convert_to_expr(eq, sys; expand_expr = false, expr_map = get_expr_map(sys)) + if expand_expr + eq = try + Symbolics.expand(eq) # PolyForm sometimes errors + catch e + Symbolics.expand(eq) + end + end + expr = ModelingToolkit.toexpr(eq) + expr = rep_pars_vals!(expr, expr_map) + expr = symbolify!(expr) + return expr +end diff --git a/lib/OptimizationMOI/src/nlp.jl b/lib/OptimizationMOI/src/nlp.jl new file mode 100644 index 000000000..a1a7e1001 --- /dev/null +++ b/lib/OptimizationMOI/src/nlp.jl @@ -0,0 +1,407 @@ +mutable struct MOIOptimizationNLPEvaluator{T, F <: OptimizationFunction, RC, LB, UB, + I, + JT <: DenseOrSparse{T}, HT <: DenseOrSparse{T}, + CHT <: DenseOrSparse{T}, S} <: + MOI.AbstractNLPEvaluator + f::F + reinit_cache::RC + lb::LB + ub::UB + int::I + lcons::Vector{T} + ucons::Vector{T} + sense::S + J::JT + H::HT + cons_H::Vector{CHT} +end + +function Base.getproperty(evaluator::MOIOptimizationNLPEvaluator, x::Symbol) + if x in fieldnames(Optimization.ReInitCache) + return getfield(evaluator.reinit_cache, x) + end + return getfield(evaluator, x) +end + +struct MOIOptimizationNLPCache{E <: MOIOptimizationNLPEvaluator, O} <: + SciMLBase.AbstractOptimizationCache + evaluator::E + opt::O + solver_args::NamedTuple +end + +function Base.getproperty(cache::MOIOptimizationNLPCache{E}, name::Symbol) where {E} + if name in fieldnames(E) + return getfield(cache.evaluator, name) + elseif name in fieldnames(Optimization.ReInitCache) + return getfield(cache.evaluator.reinit_cache, name) + end + return getfield(cache, name) +end +function Base.setproperty!(cache::MOIOptimizationNLPCache{E}, name::Symbol, x) where {E} + if name in fieldnames(E) + return setfield!(cache.evaluator, name, x) + elseif name in fieldnames(Optimization.ReInitCache) + return setfield!(cache.evaluator.reinit_cache, 
name, x) + end + return setfield!(cache, name, x) +end + +function SciMLBase.get_p(sol::SciMLBase.OptimizationSolution{T, N, uType, C}) where {T, N, + uType, + C <: + MOIOptimizationNLPCache + } + sol.cache.evaluator.p +end +function SciMLBase.get_observed(sol::SciMLBase.OptimizationSolution{T, N, uType, C}) where { + T, + N, + uType, + C <: + MOIOptimizationNLPCache + } + sol.cache.evaluator.f.observed +end +function SciMLBase.get_syms(sol::SciMLBase.OptimizationSolution{T, N, uType, C}) where {T, + N, + uType, + C <: + MOIOptimizationNLPCache + } + sol.cache.evaluator.f.syms +end +function SciMLBase.get_paramsyms(sol::SciMLBase.OptimizationSolution{T, N, uType, C}) where { + T, + N, + uType, + C <: + MOIOptimizationNLPCache + } + sol.cache.evaluator.f.paramsyms +end + +function MOIOptimizationNLPCache(prob::OptimizationProblem, opt; kwargs...) + reinit_cache = Optimization.ReInitCache(prob.u0, prob.p) # everything that can be changed via `reinit` + + num_cons = prob.ucons === nothing ? 0 : length(prob.ucons) + f = Optimization.instantiate_function(prob.f, reinit_cache, prob.f.adtype, num_cons) + T = eltype(prob.u0) + n = length(prob.u0) + + J = if isnothing(f.cons_jac_prototype) + zeros(T, num_cons, n) + else + convert.(T, f.cons_jac_prototype) + end + H = if isnothing(f.hess_prototype) + zeros(T, n, n) + else + convert.(T, f.hess_prototype) + end + cons_H = if isnothing(f.cons_hess_prototype) + Matrix{T}[zeros(T, n, n) for i in 1:num_cons] + else + [convert.(T, f.cons_hess_prototype[i]) for i in 1:num_cons] + end + lcons = prob.lcons === nothing ? fill(-Inf, num_cons) : prob.lcons + ucons = prob.ucons === nothing ? fill(Inf, num_cons) : prob.ucons + + evaluator = MOIOptimizationNLPEvaluator(f, + reinit_cache, + prob.lb, + prob.ub, + prob.int, + lcons, + ucons, + prob.sense, + J, + H, + cons_H) + return MOIOptimizationNLPCache(evaluator, opt, NamedTuple(kwargs)) +end + +SciMLBase.has_reinit(cache::MOIOptimizationNLPCache) = true +function SciMLBase.reinit!(cache::MOIOptimizationNLPCache; p = missing, u0 = missing) + if p === missing && u0 === missing + p, u0 = cache.p, cache.u0 + else # at least one of them has a value + if p === missing + p = cache.p + end + if u0 === missing + u0 = cache.u0 + end + if (eltype(p) <: Pair && !isempty(p)) || (eltype(u0) <: Pair && !isempty(u0)) # one is a non-empty symbolic map + hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :ps) || + throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." * + " Please use `remake` with the `p` keyword argument as a vector of values, paying attention to parameter order.")) + hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :states) || + throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." 
*
+                                " Please use `remake` with the `u0` keyword argument as a vector of values, paying attention to state order."))
+            p, u0 = SciMLBase.process_p_u0_symbolic(cache, p, u0)
+        end
+    end
+
+    cache.p = p
+    cache.u0 = u0
+
+    return cache
+end
+
+function MOI.features_available(evaluator::MOIOptimizationNLPEvaluator)
+    features = [:Grad, :Hess, :Jac]
+    # Assume that if `expr` exists and there are constraints, then `cons_expr` exists as well.
+    if evaluator.f.expr !== nothing
+        push!(features, :ExprGraph)
+    end
+    return features
+end
+
+function MOI.initialize(evaluator::MOIOptimizationNLPEvaluator,
+                        requested_features::Vector{Symbol})
+    available_features = MOI.features_available(evaluator)
+    for feat in requested_features
+        if !(feat in available_features)
+            error("Unsupported feature $feat")
+            # TODO: implement Jac-vec and Hess-vec products
+            # for solvers that need them
+        end
+    end
+    return
+end
+
+function MOI.eval_objective(evaluator::MOIOptimizationNLPEvaluator, x)
+    return evaluator.f(x, evaluator.p)
+end
+
+function MOI.eval_constraint(evaluator::MOIOptimizationNLPEvaluator, g, x)
+    evaluator.f.cons(g, x)
+    return
+end
+
+function MOI.eval_objective_gradient(evaluator::MOIOptimizationNLPEvaluator, G, x)
+    evaluator.f.grad(G, x)
+    return
+end
+
+# The Jacobian structure is sparse if `evaluator.J` is a `SparseMatrixCSC`, otherwise dense.
+function MOI.jacobian_structure(evaluator::MOIOptimizationNLPEvaluator)
+    if evaluator.J isa SparseMatrixCSC
+        rows, cols, _ = findnz(evaluator.J)
+        inds = Tuple{Int, Int}[(i, j) for (i, j) in zip(rows, cols)]
+    else
+        rows, cols = size(evaluator.J)
+        inds = Tuple{Int, Int}[(i, j) for j in 1:cols for i in 1:rows]
+    end
+    return inds
+end
+
+function MOI.eval_constraint_jacobian(evaluator::MOIOptimizationNLPEvaluator, j, x)
+    if isempty(j)
+        return
+    elseif evaluator.f.cons_j === nothing
+        error("Use OptimizationFunction to pass the derivatives or " *
+              "automatically generate them with one of the autodiff backends")
+    end
+    evaluator.f.cons_j(evaluator.J, x)
+    if evaluator.J isa SparseMatrixCSC
+        nnz = nonzeros(evaluator.J)
+        @assert length(j) == length(nnz)
+        for (i, Ji) in zip(eachindex(j), nnz)
+            j[i] = Ji
+        end
+    else
+        for i in eachindex(j)
+            j[i] = evaluator.J[i]
+        end
+    end
+    return
+end
+
+function MOI.hessian_lagrangian_structure(evaluator::MOIOptimizationNLPEvaluator)
+    sparse_obj = evaluator.H isa SparseMatrixCSC
+    sparse_constraints = all(H -> H isa SparseMatrixCSC, evaluator.cons_H)
+    if !sparse_constraints && any(H -> H isa SparseMatrixCSC, evaluator.cons_H)
+        # Some constraint hessians are dense and some are sparse! :(
+        error("Mix of sparse and dense constraint hessians are not supported")
+    end
+    N = length(evaluator.u0)
+    inds = if sparse_obj
+        rows, cols, _ = findnz(evaluator.H)
+        Tuple{Int, Int}[(i, j) for (i, j) in zip(rows, cols) if i <= j]
+    else
+        Tuple{Int, Int}[(row, col) for col in 1:N for row in 1:col]
+    end
+    if sparse_constraints
+        for Hi in evaluator.cons_H
+            r, c, _ = findnz(Hi)
+            for (i, j) in zip(r, c)
+                if i <= j
+                    push!(inds, (i, j))
+                end
+            end
+        end
+    elseif !sparse_obj
+        # Performance optimization. If both are dense, no need to repeat the dense indices.
+    else
+        for col in 1:N, row in 1:col
+            push!(inds, (row, col))
+        end
+    end
+    return inds
+end
+
+function MOI.eval_hessian_lagrangian(evaluator::MOIOptimizationNLPEvaluator{T},
+                                     h,
+                                     x,
+                                     σ,
+                                     μ) where {T}
+    fill!(h, zero(T))
+    k = 0
+    evaluator.f.hess(evaluator.H, x)
+    sparse_objective = evaluator.H isa SparseMatrixCSC
+    if sparse_objective
+        rows, cols, _ = findnz(evaluator.H)
+        for (i, j) in zip(rows, cols)
+            if i <= j
+                k += 1
+                h[k] = σ * evaluator.H[i, j]
+            end
+        end
+    else
+        for i in 1:size(evaluator.H, 1), j in 1:i
+            k += 1
+            h[k] = σ * evaluator.H[i, j]
+        end
+    end
+    # A count of the number of non-zeros in the objective Hessian is needed if
+    # the constraints are dense.
+    nnz_objective = k
+    if !isempty(μ) && !all(iszero, μ)
+        evaluator.f.cons_h(evaluator.cons_H, x)
+        for (μi, Hi) in zip(μ, evaluator.cons_H)
+            if Hi isa SparseMatrixCSC
+                rows, cols, _ = findnz(Hi)
+                for (i, j) in zip(rows, cols)
+                    if i <= j
+                        k += 1
+                        h[k] += μi * Hi[i, j]
+                    end
+                end
+            else
+                # The constraints are dense. We only store one copy of the
+                # Hessian, so reset `k` to where it starts. That will be
+                # `nnz_objective` if the objective is sparse, and `0` otherwise.
+                k = sparse_objective ? nnz_objective : 0
+                for i in 1:size(Hi, 1), j in 1:i
+                    k += 1
+                    h[k] += μi * Hi[i, j]
+                end
+            end
+        end
+    end
+    return
+end
+
+function MOI.objective_expr(evaluator::MOIOptimizationNLPEvaluator)
+    expr = deepcopy(evaluator.f.expr)
+    repl_getindex!(expr)
+    _replace_parameter_indices!(expr, evaluator.p)
+    _replace_variable_indices!(expr)
+    return expr
+end
+
+function MOI.constraint_expr(evaluator::MOIOptimizationNLPEvaluator, i)
+    # expr has the form f(x,p) == 0 or f(x,p) <= 0
+    cons_expr = deepcopy(evaluator.f.cons_expr[i].args[2])
+    repl_getindex!(cons_expr)
+    _replace_parameter_indices!(cons_expr, evaluator.p)
+    _replace_variable_indices!(cons_expr)
+    lb, ub = Float64(evaluator.lcons[i]), Float64(evaluator.ucons[i])
+    return :($lb <= $cons_expr <= $ub)
+end
+
+function _add_moi_variables!(opt_setup, evaluator::MOIOptimizationNLPEvaluator)
+    num_variables = length(evaluator.u0)
+    θ = MOI.add_variables(opt_setup, num_variables)
+    if evaluator.lb !== nothing
+        eachindex(evaluator.lb) == Base.OneTo(num_variables) ||
+            throw(ArgumentError("Expected `evaluator.lb` to be of the same length as the number of variables."))
+    end
+    if evaluator.ub !== nothing
+        eachindex(evaluator.ub) == Base.OneTo(num_variables) ||
+            throw(ArgumentError("Expected `evaluator.ub` to be of the same length as the number of variables."))
+    end
+
+    for i in 1:num_variables
+        if evaluator.lb !== nothing && evaluator.lb[i] > -Inf
+            MOI.add_constraint(opt_setup, θ[i], MOI.GreaterThan(evaluator.lb[i]))
+        end
+        if evaluator.ub !== nothing && evaluator.ub[i] < Inf
+            MOI.add_constraint(opt_setup, θ[i], MOI.LessThan(evaluator.ub[i]))
+        end
+        if evaluator.int !== nothing && evaluator.int[i]
+            if evaluator.lb !== nothing && evaluator.lb[i] == 0 &&
+               evaluator.ub !== nothing &&
+               evaluator.ub[i] == 1
+                MOI.add_constraint(opt_setup, θ[i], MOI.ZeroOne())
+            else
+                MOI.add_constraint(opt_setup, θ[i], MOI.Integer())
+            end
+        end
+    end
+
+    if MOI.supports(opt_setup, MOI.VariablePrimalStart(), MOI.VariableIndex)
+        eachindex(evaluator.u0) == Base.OneTo(num_variables) ||
+            throw(ArgumentError("Expected `evaluator.u0` to be of the same length as the number of variables."))
+        for i in 1:num_variables
+            MOI.set(opt_setup, MOI.VariablePrimalStart(), θ[i], evaluator.u0[i])
+        end
+    end
+    return θ
+end
+
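+# Editor's note (flow sketch, no new functionality): unlike the expression-based path
+# in moi.jl, which materializes `MOI.ScalarAffineFunction`/`MOI.ScalarQuadraticFunction`
+# objects, the NLP path below registers `cache.evaluator` via `MOI.NLPBlockData`, and
+# the solver pulls objective, gradient, Jacobian, and Hessian values through the
+# `MOI.eval_*` callbacks defined above.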
+function SciMLBase.__solve(cache::MOIOptimizationNLPCache)
+    maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
+    maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime)
+    opt_setup = __map_optimizer_args(cache,
+                                     cache.opt;
+                                     abstol = cache.solver_args.abstol,
+                                     reltol = cache.solver_args.reltol,
+                                     maxiters = maxiters,
+                                     maxtime = maxtime,
+                                     cache.solver_args...)
+
+    θ = _add_moi_variables!(opt_setup, cache.evaluator)
+    MOI.set(opt_setup,
+            MOI.ObjectiveSense(),
+            cache.evaluator.sense === Optimization.MaxSense ? MOI.MAX_SENSE : MOI.MIN_SENSE)
+    xor(isnothing(cache.evaluator.lcons), isnothing(cache.evaluator.ucons)) &&
+        throw(ArgumentError("Expected `cache.evaluator.lcons` and `cache.evaluator.ucons` to either both be supplied or both be `nothing`."))
+    if isnothing(cache.evaluator.lcons) && isnothing(cache.evaluator.ucons)
+        con_bounds = MOI.NLPBoundsPair[]
+    else
+        con_bounds = MOI.NLPBoundsPair.(Float64.(cache.evaluator.lcons),
+                                        Float64.(cache.evaluator.ucons))
+    end
+    MOI.set(opt_setup,
+            MOI.NLPBlock(),
+            MOI.NLPBlockData(con_bounds, cache.evaluator, true))
+    MOI.optimize!(opt_setup)
+    if MOI.get(opt_setup, MOI.ResultCount()) >= 1
+        minimizer = MOI.get(opt_setup, MOI.VariablePrimal(), θ)
+        minimum = MOI.get(opt_setup, MOI.ObjectiveValue())
+        opt_ret = __moi_status_to_ReturnCode(MOI.get(opt_setup, MOI.TerminationStatus()))
+    else
+        minimizer = fill(NaN, length(θ))
+        minimum = NaN
+        opt_ret = SciMLBase.ReturnCode.Default
+    end
+    return SciMLBase.build_solution(cache,
+                                    cache.opt,
+                                    minimizer,
+                                    minimum;
+                                    original = opt_setup,
+                                    retcode = opt_ret)
+end
diff --git a/lib/OptimizationMOI/test/runtests.jl b/lib/OptimizationMOI/test/runtests.jl
index 468edcbeb..38e06005c 100644
--- a/lib/OptimizationMOI/test/runtests.jl
+++ b/lib/OptimizationMOI/test/runtests.jl
@@ -1,5 +1,5 @@
 using OptimizationMOI, Optimization, Ipopt, NLopt, Zygote, ModelingToolkit
-using AmplNLWriter, Ipopt_jll, Juniper
+using AmplNLWriter, Ipopt_jll, Juniper, HiGHS
 using Test
 
 function _test_sparse_derivatives_hs071(backend, optimizer)
@@ -28,7 +28,7 @@ function _test_sparse_derivatives_hs071(backend, optimizer)
     return
 end
 
-@testset "OptimizationMOI.jl" begin
+@testset "NLP" begin
     rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
     x0 = zeros(2)
    _p = [1.0, 100.0]
@@ -40,6 +40,11 @@ end
     sol = solve(prob, Ipopt.Optimizer())
     @test 10 * sol.objective < l1
 
+    # cache interface
+    cache = init(prob, Ipopt.Optimizer())
+    sol = solve!(cache)
+    @test 10 * sol.minimum < l1
+
     optprob = OptimizationFunction(rosenbrock, Optimization.AutoZygote())
 
     prob = OptimizationProblem(optprob, x0, _p; sense = Optimization.MinSense)
@@ -85,15 +90,19 @@ end
     @test 10 * sol.objective < l1
 end
 
-@testset "backends" begin for backend in (Optimization.AutoModelingToolkit(false, false),
-                                          Optimization.AutoModelingToolkit(true, false),
-                                          Optimization.AutoModelingToolkit(false, true),
-                                          Optimization.AutoModelingToolkit(true, true))
-    @testset "$backend" begin
-        _test_sparse_derivatives_hs071(backend, Ipopt.Optimizer())
-        _test_sparse_derivatives_hs071(backend, AmplNLWriter.Optimizer(Ipopt_jll.amplexe))
+@testset "backends" begin
+    backends = (Optimization.AutoModelingToolkit(false, false),
+                Optimization.AutoModelingToolkit(true, false),
+                Optimization.AutoModelingToolkit(false, true),
+                Optimization.AutoModelingToolkit(true, true))
+    for backend in backends
+        @testset "$backend" begin
+            _test_sparse_derivatives_hs071(backend, Ipopt.Optimizer())
+            _test_sparse_derivatives_hs071(backend,
AmplNLWriter.Optimizer(Ipopt_jll.amplexe)) + end end -end end +end @testset "Integer Support" begin nl_solver = OptimizationMOI.MOI.OptimizerWithAttributes(Ipopt.Optimizer, @@ -135,3 +144,57 @@ end end @test res.objective <= 5eps() end end + +@testset "cache" begin + @variables x + @parameters a = 1.0 + @named sys = OptimizationSystem((x - a)^2, [x], [a];) + + prob = OptimizationProblem(sys, [x => 0.0], []; grad = true, hess = true) + cache = init(prob, Ipopt.Optimizer(); print_level = 0) + sol = solve!(cache) + @test sol.u ≈ [1.0] # ≈ [1] + + cache = OptimizationMOI.reinit!(cache; p = [2.0]) + sol = solve!(cache) + @test sol.u ≈ [2.0] # ≈ [2] + + prob = OptimizationProblem(sys, [x => 0.0], []; grad = false, hess = false) + cache = init(prob, HiGHS.Optimizer()) + sol = solve!(cache) + @test sol.u≈[1.0] rtol=1e-3 # ≈ [1] + + cache = OptimizationMOI.reinit!(cache; p = [2.0]) + sol = solve!(cache) + @test sol.u≈[2.0] rtol=1e-3 # ≈ [2] +end + +@testset "MOI" begin + @parameters c = 0.0 + @variables x[1:2]=[0.0, 0.0] [bounds = (c, Inf)] + @parameters a = 3.0 + @parameters b = 4.0 + @parameters d = 2.0 + @named sys = OptimizationSystem(a * x[1]^2 + b * x[2]^2 + d * x[1] * x[2] + 5 * x[1] + + x[2], [x...], [a, b, c, d]; + constraints = [ + x[1] + 2 * x[2] ~ 1.0, + ]) + prob = OptimizationProblem(sys, [x[1] => 2.0, x[2] => 0.0], []; grad = true, + hess = true) + sol = solve(prob, HiGHS.Optimizer()) + sol.u +end + +@testset "tutorial" begin + rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2 + x0 = zeros(2) + _p = [1.0, 1.0] + + cons(res, x, p) = (res .= [x[1]^2 + x[2]^2, x[1] * x[2]]) + + optprob = OptimizationFunction(rosenbrock, Optimization.AutoModelingToolkit(), + cons = cons) + prob = OptimizationProblem(optprob, x0, _p, lcons = [1.0, 0.5], ucons = [1.0, 0.5]) + sol = solve(prob, AmplNLWriter.Optimizer(Ipopt_jll.amplexe)) +end diff --git a/src/function/mtk.jl b/src/function/mtk.jl index f2ddbb84b..19539b2ae 100644 --- a/src/function/mtk.jl +++ b/src/function/mtk.jl @@ -119,3 +119,109 @@ function instantiate_function(f, cache::ReInitCache, expr = symbolify(f.expr), cons_expr = symbolify.(f.cons_expr)) end + +function instantiate_function(f, cache::ReInitCache, + adtype::AutoModelingToolkit, num_cons = 0) + p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p + sys = ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0, cache.p)) + + hess_prototype, cons_jac_prototype, cons_hess_prototype = nothing, nothing, nothing + + if f.grad === nothing + grad_oop, grad_iip = ModelingToolkit.generate_gradient(sys, expression = Val{false}) + grad(J, u) = (grad_iip(J, u, cache.p); J) + else + grad = (G, θ, args...) -> f.grad(G, θ, p, args...) + end + + if f.hess === nothing + hess_oop, hess_iip = ModelingToolkit.generate_hessian(sys, expression = Val{false}, + sparse = adtype.obj_sparse) + hess(H, u) = (hess_iip(H, u, cache.p); H) + else + hess = (H, θ, args...) -> f.hess(H, θ, cache.p, args...) + end + + if f.hv === nothing + hv = function (H, θ, v, args...) + res = adtype.obj_sparse ? hess_prototype : ArrayInterfaceCore.zeromatrix(θ) + hess(res, θ, args...) 
+ H .= res * v + end + else + hv = f.hv + end + + expr = symbolify(ModelingToolkit.Symbolics.toexpr(ModelingToolkit.equations(sys))) + pairs_arr = if cache.p isa SciMLBase.NullParameters + [Symbol(_s) => Expr(:ref, :x, i) + for (i, _s) in enumerate(ModelingToolkit.states(sys))] + else + vcat([Symbol(_s) => Expr(:ref, :x, i) + for (i, _s) in enumerate(ModelingToolkit.states(sys))], + [Symbol(_p) => Expr(:ref, :p, i) + for (i, _p) in enumerate(ModelingToolkit.parameters(sys))]) + end + rep_pars_vals!(expr, pairs_arr) + + if f.cons === nothing + cons = nothing + cons_exprs = nothing + else + cons = (res, θ) -> f.cons(res, θ, cache.p) + cons_oop = (x, p) -> (_res = zeros(eltype(x), num_cons); f.cons(_res, x, p); _res) + + cons_sys = ModelingToolkit.modelingtoolkitize(NonlinearProblem(cons_oop, cache.u0, + cache.p)) # 0 = f(x) + cons_eqs_lhss = ModelingToolkit.lhss(ModelingToolkit.Symbolics.canonical_form.(ModelingToolkit.equations(cons_sys))) # -f(x) == 0 + cons_exprs = map(cons_eqs_lhss) do lhs + e = symbolify(ModelingToolkit.Symbolics.toexpr(-lhs)) # 0 == f(x) + rep_pars_vals!(e, pairs_arr) + return Expr(:call, :(==), e, :0) + end + end + + if f.cons !== nothing && f.cons_j === nothing + jac_oop, jac_iip = ModelingToolkit.generate_jacobian(cons_sys, + expression = Val{false}, + sparse = adtype.cons_sparse) + cons_j = function (J, θ) + jac_iip(J, θ, cache.p) + end + else + cons_j = (J, θ) -> f.cons_j(J, θ, cache.p) + end + + if f.cons !== nothing && f.cons_h === nothing + cons_hess_oop, cons_hess_iip = ModelingToolkit.generate_hessian(cons_sys, + expression = Val{ + false + }, + sparse = adtype.cons_sparse) + cons_h = function (res, θ) + cons_hess_iip(res, θ, cache.p) + end + else + cons_h = (res, θ) -> f.cons_h(res, θ, cache.p) + end + + if adtype.obj_sparse + _hess_prototype = ModelingToolkit.hessian_sparsity(sys) + hess_prototype = convert.(eltype(cache.u0), _hess_prototype) + end + + if adtype.cons_sparse + _cons_jac_prototype = ModelingToolkit.jacobian_sparsity(cons_sys) + cons_jac_prototype = convert.(eltype(cache.u0), _cons_jac_prototype) + _cons_hess_prototype = ModelingToolkit.hessian_sparsity(cons_sys) + cons_hess_prototype = [convert.(eltype(cache.u0), _cons_hess_prototype[i]) + for i in 1:num_cons] + end + + return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv, + cons = cons, cons_j = cons_j, cons_h = cons_h, + hess_prototype = hess_prototype, + cons_jac_prototype = cons_jac_prototype, + cons_hess_prototype = cons_hess_prototype, + expr = expr, cons_expr = cons_exprs) +end From 322c62569e5653bcfb1eba767b88faa21f4077b0 Mon Sep 17 00:00:00 2001 From: Valentin Kaisermayer Date: Thu, 13 Apr 2023 16:02:46 +0200 Subject: [PATCH 2/4] fix --- src/function/mtk.jl | 106 -------------------------------------------- 1 file changed, 106 deletions(-) diff --git a/src/function/mtk.jl b/src/function/mtk.jl index 19539b2ae..f2ddbb84b 100644 --- a/src/function/mtk.jl +++ b/src/function/mtk.jl @@ -119,109 +119,3 @@ function instantiate_function(f, cache::ReInitCache, expr = symbolify(f.expr), cons_expr = symbolify.(f.cons_expr)) end - -function instantiate_function(f, cache::ReInitCache, - adtype::AutoModelingToolkit, num_cons = 0) - p = isnothing(cache.p) ? 
SciMLBase.NullParameters() : cache.p - sys = ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0, cache.p)) - - hess_prototype, cons_jac_prototype, cons_hess_prototype = nothing, nothing, nothing - - if f.grad === nothing - grad_oop, grad_iip = ModelingToolkit.generate_gradient(sys, expression = Val{false}) - grad(J, u) = (grad_iip(J, u, cache.p); J) - else - grad = (G, θ, args...) -> f.grad(G, θ, p, args...) - end - - if f.hess === nothing - hess_oop, hess_iip = ModelingToolkit.generate_hessian(sys, expression = Val{false}, - sparse = adtype.obj_sparse) - hess(H, u) = (hess_iip(H, u, cache.p); H) - else - hess = (H, θ, args...) -> f.hess(H, θ, cache.p, args...) - end - - if f.hv === nothing - hv = function (H, θ, v, args...) - res = adtype.obj_sparse ? hess_prototype : ArrayInterfaceCore.zeromatrix(θ) - hess(res, θ, args...) - H .= res * v - end - else - hv = f.hv - end - - expr = symbolify(ModelingToolkit.Symbolics.toexpr(ModelingToolkit.equations(sys))) - pairs_arr = if cache.p isa SciMLBase.NullParameters - [Symbol(_s) => Expr(:ref, :x, i) - for (i, _s) in enumerate(ModelingToolkit.states(sys))] - else - vcat([Symbol(_s) => Expr(:ref, :x, i) - for (i, _s) in enumerate(ModelingToolkit.states(sys))], - [Symbol(_p) => Expr(:ref, :p, i) - for (i, _p) in enumerate(ModelingToolkit.parameters(sys))]) - end - rep_pars_vals!(expr, pairs_arr) - - if f.cons === nothing - cons = nothing - cons_exprs = nothing - else - cons = (res, θ) -> f.cons(res, θ, cache.p) - cons_oop = (x, p) -> (_res = zeros(eltype(x), num_cons); f.cons(_res, x, p); _res) - - cons_sys = ModelingToolkit.modelingtoolkitize(NonlinearProblem(cons_oop, cache.u0, - cache.p)) # 0 = f(x) - cons_eqs_lhss = ModelingToolkit.lhss(ModelingToolkit.Symbolics.canonical_form.(ModelingToolkit.equations(cons_sys))) # -f(x) == 0 - cons_exprs = map(cons_eqs_lhss) do lhs - e = symbolify(ModelingToolkit.Symbolics.toexpr(-lhs)) # 0 == f(x) - rep_pars_vals!(e, pairs_arr) - return Expr(:call, :(==), e, :0) - end - end - - if f.cons !== nothing && f.cons_j === nothing - jac_oop, jac_iip = ModelingToolkit.generate_jacobian(cons_sys, - expression = Val{false}, - sparse = adtype.cons_sparse) - cons_j = function (J, θ) - jac_iip(J, θ, cache.p) - end - else - cons_j = (J, θ) -> f.cons_j(J, θ, cache.p) - end - - if f.cons !== nothing && f.cons_h === nothing - cons_hess_oop, cons_hess_iip = ModelingToolkit.generate_hessian(cons_sys, - expression = Val{ - false - }, - sparse = adtype.cons_sparse) - cons_h = function (res, θ) - cons_hess_iip(res, θ, cache.p) - end - else - cons_h = (res, θ) -> f.cons_h(res, θ, cache.p) - end - - if adtype.obj_sparse - _hess_prototype = ModelingToolkit.hessian_sparsity(sys) - hess_prototype = convert.(eltype(cache.u0), _hess_prototype) - end - - if adtype.cons_sparse - _cons_jac_prototype = ModelingToolkit.jacobian_sparsity(cons_sys) - cons_jac_prototype = convert.(eltype(cache.u0), _cons_jac_prototype) - _cons_hess_prototype = ModelingToolkit.hessian_sparsity(cons_sys) - cons_hess_prototype = [convert.(eltype(cache.u0), _cons_hess_prototype[i]) - for i in 1:num_cons] - end - - return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv, - cons = cons, cons_j = cons_j, cons_h = cons_h, - hess_prototype = hess_prototype, - cons_jac_prototype = cons_jac_prototype, - cons_hess_prototype = cons_hess_prototype, - expr = expr, cons_expr = cons_exprs) -end From d9205d9ef5f0c3e3e3ed83545aca97650fceffe8 Mon Sep 17 00:00:00 2001 From: Valentin Kaisermayer Date: Thu, 13 Apr 2023 16:36:58 +0200 Subject: 
[PATCH 3/4] adds tests --- lib/OptimizationMOI/test/runtests.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/OptimizationMOI/test/runtests.jl b/lib/OptimizationMOI/test/runtests.jl index 38e06005c..a5a750d56 100644 --- a/lib/OptimizationMOI/test/runtests.jl +++ b/lib/OptimizationMOI/test/runtests.jl @@ -152,6 +152,7 @@ end prob = OptimizationProblem(sys, [x => 0.0], []; grad = true, hess = true) cache = init(prob, Ipopt.Optimizer(); print_level = 0) + @test cache isa OptimizationMOI.MOIOptimizationNLPCache sol = solve!(cache) @test sol.u ≈ [1.0] # ≈ [1] @@ -161,6 +162,7 @@ end prob = OptimizationProblem(sys, [x => 0.0], []; grad = false, hess = false) cache = init(prob, HiGHS.Optimizer()) + @test cache isa OptimizationMOI.MOIOptimizationCache sol = solve!(cache) @test sol.u≈[1.0] rtol=1e-3 # ≈ [1] From 1b199e69fe2aa96d6788102df89b06fcf2034131 Mon Sep 17 00:00:00 2001 From: Valentin Kaisermayer Date: Sat, 15 Apr 2023 13:00:37 +0200 Subject: [PATCH 4/4] fix and formatting --- src/function/function.jl | 6 ++++-- src/function/mtk.jl | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/function/function.jl b/src/function/function.jl index 2d6810dbf..9028a9f46 100644 --- a/src/function/function.jl +++ b/src/function/function.jl @@ -68,7 +68,8 @@ function instantiate_function(f, x, ::AbstractADType, cons_jac_prototype = cons_jac_prototype, cons_hess_prototype = cons_hess_prototype, expr = expr, cons_expr = cons_expr, - syms = f.syms, paramsyms = f.paramsyms) + syms = f.syms, paramsyms = f.paramsyms, + observed = f.observed) end function instantiate_function(f, cache::ReInitCache, ::AbstractADType, @@ -96,5 +97,6 @@ function instantiate_function(f, cache::ReInitCache, ::AbstractADType, cons_jac_prototype = cons_jac_prototype, cons_hess_prototype = cons_hess_prototype, expr = expr, cons_expr = cons_expr, - syms = f.syms, paramsyms = f.paramsyms) + syms = f.syms, paramsyms = f.paramsyms, + observed = f.observed) end diff --git a/src/function/mtk.jl b/src/function/mtk.jl index f2ddbb84b..acb179e7c 100644 --- a/src/function/mtk.jl +++ b/src/function/mtk.jl @@ -78,7 +78,8 @@ function instantiate_function(f, x, adtype::AutoModelingToolkit, p, cons_jac_prototype = f.cons_jac_prototype, cons_hess_prototype = f.cons_hess_prototype, expr = symbolify(f.expr), - cons_expr = symbolify.(f.cons_expr)) + cons_expr = symbolify.(f.cons_expr), + observed = f.observed) end function instantiate_function(f, cache::ReInitCache, @@ -117,5 +118,6 @@ function instantiate_function(f, cache::ReInitCache, cons_jac_prototype = f.cons_jac_prototype, cons_hess_prototype = f.cons_hess_prototype, expr = symbolify(f.expr), - cons_expr = symbolify.(f.cons_expr)) + cons_expr = symbolify.(f.cons_expr), + observed = f.observed) end
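
---

Editor's usage sketch (not part of the patch series): the cache interface added in
PATCH 1/4 and exercised by the tests above is intended to be used roughly as follows.
The solver choice and `print_level` setting are illustrative.

    using Optimization, OptimizationMOI, ModelingToolkit, Ipopt

    @variables x
    @parameters a = 1.0
    @named sys = OptimizationSystem((x - a)^2, [x], [a])
    prob = OptimizationProblem(sys, [x => 0.0], []; grad = true, hess = true)

    cache = init(prob, Ipopt.Optimizer(); print_level = 0)  # NLP path (solver supports MOI.NLPBlock)
    sol = solve!(cache)                                     # sol.u ≈ [1.0]

    cache = OptimizationMOI.reinit!(cache; p = [2.0])       # swap in a new parameter value
    sol = solve!(cache)                                     # sol.u ≈ [2.0]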