Skip to content

Commit

Permalink
Merge branch 'master' into vk-moi-no-nlp-block
Browse files Browse the repository at this point in the history
  • Loading branch information
Vaibhavdixit02 authored Apr 6, 2023
2 parents cb4858b + 0614bc6 commit 5f4b05e
Show file tree
Hide file tree
Showing 23 changed files with 806 additions and 329 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name = "Optimization"
uuid = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
version = "3.12.1"
version = "3.13.0"

[deps]
ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
Expand Down
1 change: 1 addition & 0 deletions docs/pages.jl
Original file line number Diff line number Diff line change
Expand Up @@ -31,5 +31,6 @@ pages = ["index.md",
"Optim.jl" => "optimization_packages/optim.md",
"Optimisers.jl" => "optimization_packages/optimisers.md",
"QuadDIRECT.jl" => "optimization_packages/quaddirect.md",
"SpeedMapping.jl" => "optimization_packages/speedmapping.md",
],
]
4 changes: 2 additions & 2 deletions docs/src/examples/rosenbrock.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,15 +69,15 @@ sol = solve(prob, IPNewton())
prob = OptimizationProblem(optf, x0, _p, lcons = [0.5], ucons = [0.5],
lb = [-500.0, -500.0], ub = [50.0, 50.0])
sol = solve(prob, IPNewton()) # Notice now that x[1]^2 + x[2]^2 ≈ 0.5:
# cons(sol.minimizer, _p) = 0.49999999999999994
# cons(sol.u, _p) = 0.49999999999999994
function con_c(res, x, p)
res .= [x[1]^2 + x[2]^2]
end
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(); cons = con_c)
prob = OptimizationProblem(optf, x0, _p, lcons = [-Inf], ucons = [0.25^2])
sol = solve(prob, IPNewton()) # -Inf < cons_circ(sol.minimizer, _p) = 0.25^2
sol = solve(prob, IPNewton()) # -Inf < cons_circ(sol.u, _p) = 0.25^2
function con2_c(res, x, p)
res .= [x[1]^2 + x[2]^2, x[2] * sin(x[1]) - x[1]]
Expand Down
99 changes: 77 additions & 22 deletions lib/OptimizationFlux/src/OptimizationFlux.jl
Original file line number Diff line number Diff line change
Expand Up @@ -4,43 +4,99 @@ using Reexport, Printf, ProgressLogging
@reexport using Flux, Optimization
using Optimization.SciMLBase

function SciMLBase.__solve(prob::OptimizationProblem, opt::Flux.Optimise.AbstractOptimiser,
data = Optimization.DEFAULT_DATA;
maxiters::Number = 0, callback = (args...) -> (false),
progress = false, save_best = true, kwargs...)
if data != Optimization.DEFAULT_DATA
maxiters = length(data)
"""
    FluxOptimizationCache{F, RC, O, D}

Cache backing the `init`/`solve!` interface for Flux optimisers.  Bundles the
instantiated objective, the re-initializable state (`u0`/`p`), the optimiser,
the data iterator, and all remaining solver keyword arguments.
"""
struct FluxOptimizationCache{F <: OptimizationFunction, RC, O, D} <:
       SciMLBase.AbstractOptimizationCache
    f::F                    # instantiated `OptimizationFunction` (provides `f.f` and `f.grad`)
    reinit_cache::RC        # holds `u0` and `p`; these are what `reinit!` may replace
    opt::O                  # the Flux optimiser instance
    data::D                 # mini-batch data iterator (`Optimization.DEFAULT_DATA` when unused)
    solver_args::NamedTuple # keyword arguments captured at `__init` time (maxiters, callback, ...)
end

function Base.getproperty(cache::FluxOptimizationCache, name::Symbol)
    # Properties named after `ReInitCache` fields (`u0`, `p`) are delegated to the
    # wrapped reinit cache; every other lookup resolves against the cache itself.
    owner = name in fieldnames(Optimization.ReInitCache) ?
            getfield(cache, :reinit_cache) : cache
    return getfield(owner, name)
end

function FluxOptimizationCache(prob::OptimizationProblem, opt, data; kwargs...)
    # The reinit cache carries exactly the state that `reinit!` may later swap
    # out: the initial point and the parameters.
    rc = Optimization.ReInitCache(prob.u0, prob.p)
    objective = Optimization.instantiate_function(prob.f, rc, prob.f.adtype)
    return FluxOptimizationCache(objective, rc, opt, data, NamedTuple(kwargs))
end

# Flux optimisers participate in the `init`/`solve!` cache interface ...
SciMLBase.supports_opt_cache_interface(opt::Flux.Optimise.AbstractOptimiser) = true
# ... and their caches can be re-initialized in place via `reinit!`.
SciMLBase.has_reinit(cache::FluxOptimizationCache) = true
"""
    SciMLBase.reinit!(cache::FluxOptimizationCache; p = missing, u0 = missing)

Replace the parameters `p` and/or the initial point `u0` stored in `cache` and
return the mutated cache.  Omitted keywords keep their current values.
Symbolic maps (non-empty collections with `Pair` eltype) are only accepted when
the underlying function has a symbolic origin (`cache.f.sys`).
"""
function SciMLBase.reinit!(cache::FluxOptimizationCache; p = missing, u0 = missing)
    if p === missing && u0 === missing
        p, u0 = cache.p, cache.u0
    else # at least one of them has a value
        if p === missing
            p = cache.p
        end
        if u0 === missing
            u0 = cache.u0
        end
        if (eltype(p) <: Pair && !isempty(p)) || (eltype(u0) <: Pair && !isempty(u0)) # one is a non-empty symbolic map
            # Symbolic maps can only be resolved against a symbolic system:
            # `sys.ps` for parameters, `sys.states` for states.
            hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :ps) ||
                throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." *
                                    " Please use `remake` with the `p` keyword argument as a vector of values, paying attention to parameter order."))
            hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :states) ||
                throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." *
                                    " Please use `remake` with the `u0` keyword argument as a vector of values, paying attention to state order."))
            # Convert the symbolic maps into plain value vectors in canonical order.
            p, u0 = SciMLBase.process_p_u0_symbolic(cache, p, u0)
        end
    end

    cache.reinit_cache.p = p
    cache.reinit_cache.u0 = u0

    return cache
end

function SciMLBase.__init(prob::OptimizationProblem, opt::Flux.Optimise.AbstractOptimiser,
        data = Optimization.DEFAULT_DATA;
        maxiters::Number = 0, callback = (args...) -> (false),
        progress = false, save_best = true, kwargs...)
    # All solver settings travel inside the cache's `solver_args` NamedTuple.
    solver_kwargs = (; maxiters, callback, progress, save_best, kwargs...)
    return FluxOptimizationCache(prob, opt, data; solver_kwargs...)
end

function SciMLBase.__solve(cache::FluxOptimizationCache)
if cache.data != Optimization.DEFAULT_DATA
maxiters = length(cache.data)
data = cache.data
else
maxiters = Optimization._check_and_convert_maxiters(maxiters)
data = Optimization.take(data, maxiters)
maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
data = Optimization.take(cache.data, maxiters)
end

# Flux is silly and doesn't have an abstract type on its optimizers, so assume
# this is a Flux optimizer
θ = copy(prob.u0)
θ = copy(cache.u0)
G = copy(θ)
opt = deepcopy(cache.opt)

local x, min_err, min_θ
min_err = typemax(eltype(prob.u0)) #dummy variables
min_err = typemax(eltype(cache.u0)) #dummy variables
min_opt = 1
min_θ = prob.u0

f = Optimization.instantiate_function(prob.f, prob.u0, prob.f.adtype, prob.p)
min_θ = cache.u0

t0 = time()
Optimization.@withprogress progress name="Training" begin for (i, d) in enumerate(data)
f.grad(G, θ, d...)
x = f.f(θ, prob.p, d...)
cb_call = callback(θ, x...)
Optimization.@withprogress cache.solver_args.progress name="Training" begin for (i, d) in enumerate(data)
cache.f.grad(G, θ, d...)
x = cache.f.f(θ, cache.p, d...)
cb_call = cache.solver_args.callback(θ, x...)
if !(typeof(cb_call) <: Bool)
error("The callback should return a boolean `halt` for whether to stop the optimization process. Please see the sciml_train documentation for information.")
elseif cb_call
break
end
msg = @sprintf("loss: %.3g", x[1])
progress && ProgressLogging.@logprogress msg i/maxiters
cache.solver_args.progress && ProgressLogging.@logprogress msg i/maxiters

if save_best
if cache.solver_args.save_best
if first(x) < first(min_err) #found a better solution
min_opt = opt
min_err = x
Expand All @@ -50,7 +106,7 @@ function SciMLBase.__solve(prob::OptimizationProblem, opt::Flux.Optimise.Abstrac
opt = min_opt
x = min_err
θ = min_θ
callback(θ, x...)
cache.solver_args.callback(θ, x...)
break
end
end
Expand All @@ -59,8 +115,7 @@ function SciMLBase.__solve(prob::OptimizationProblem, opt::Flux.Optimise.Abstrac

t1 = time()

SciMLBase.build_solution(SciMLBase.DefaultOptimizationCache(prob.f, prob.p), opt, θ,
x[1], solve_time = t1 - t0)
SciMLBase.build_solution(cache, opt, θ, x[1], solve_time = t1 - t0)
# here should be build_solution to create the output message
end

Expand Down
21 changes: 19 additions & 2 deletions lib/OptimizationFlux/test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,27 @@ using Test

prob = OptimizationProblem(optprob, x0, _p)

sol = Optimization.solve(prob, Flux.ADAM(0.1), maxiters = 1000)
sol = Optimization.solve(prob, Flux.Adam(0.1), maxiters = 1000)
@test 10 * sol.objective < l1

prob = OptimizationProblem(optprob, x0, _p)
sol = solve(prob, Flux.ADAM(), maxiters = 1000, progress = false)
sol = solve(prob, Flux.Adam(), maxiters = 1000, progress = false)
@test 10 * sol.objective < l1

@testset "cache" begin
    # Minimize (p[1] - x[1])^2: the optimum is x == p, which lets us check
    # both the initial solve and a solve after `reinit!` with new parameters.
    objective(x, p) = (p[1] - x[1])^2
    x0 = zeros(1)
    p = [1.0]

    prob = OptimizationProblem(OptimizationFunction(objective,
                                                    Optimization.AutoForwardDiff()), x0,
                               p)
    cache = Optimization.init(prob, Flux.Adam(0.1), maxiters = 1000)
    sol = Optimization.solve!(cache)
    # `@test sol.u[1.0] atol=...` was invalid: `atol` only applies to an `≈` comparison.
    @test sol.u ≈ [1.0] atol=1e-3

    cache = Optimization.reinit!(cache; p = [2.0])
    sol = Optimization.solve!(cache)
    @test sol.u ≈ [2.0] atol=1e-3
end
end
116 changes: 90 additions & 26 deletions lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,65 @@ struct GCMAESOpt end
SciMLBase.requiresbounds(::GCMAESOpt) = true
SciMLBase.allowsbounds(::GCMAESOpt) = true

function __map_optimizer_args(prob::OptimizationProblem, opt::GCMAESOpt;
"""
    GCMAESOptimizationCache{F, RC, LB, UB, S, O, P, S0}

Cache backing the `init`/`solve!` interface for the GCMAES solver.  Stores the
instantiated objective, the re-initializable state (`u0`/`p`), the box bounds
(GCMAES requires bounds), the optimization sense, and solver settings.
"""
struct GCMAESOptimizationCache{F <: OptimizationFunction, RC, LB, UB, S, O, P, S0} <:
       SciMLBase.AbstractOptimizationCache
    f::F                    # instantiated `OptimizationFunction` (`f.f`, optionally `f.grad`)
    reinit_cache::RC        # holds `u0` and `p`; these are what `reinit!` may replace
    lb::LB                  # lower bounds (required: `SciMLBase.requiresbounds(::GCMAESOpt) = true`)
    ub::UB                  # upper bounds
    sense::S                # `Optimization.MaxSense` selects `GCMAES.maximize`, otherwise minimize
    opt::O                  # the `GCMAESOpt` instance
    progress::P             # progress flag captured at `__init`
    sigma0::S0              # initial step size σ0 (defaults to 0.2 in `__init`)
    solver_args::NamedTuple # remaining keyword arguments (maxiters, maxtime, abstol, reltol, ...)
end

function Base.getproperty(cache::GCMAESOptimizationCache, name::Symbol)
    # `u0` and `p` live on the inner `ReInitCache`; delegate those reads there
    # and serve every other property from the cache's own fields.
    name in fieldnames(Optimization.ReInitCache) &&
        return getfield(getfield(cache, :reinit_cache), name)
    return getfield(cache, name)
end

function GCMAESOptimizationCache(prob::OptimizationProblem, opt; progress, sigma0,
        kwargs...)
    # `reinit!` may later swap out the initial point and parameters, so they
    # live in a dedicated ReInitCache.
    rc = Optimization.ReInitCache(prob.u0, prob.p)
    objective = Optimization.instantiate_function(prob.f, rc, prob.f.adtype)
    return GCMAESOptimizationCache(objective, rc, prob.lb, prob.ub, prob.sense, opt,
                                   progress, sigma0, NamedTuple(kwargs))
end

# GCMAES participates in the `init`/`solve!` cache interface ...
SciMLBase.supports_opt_cache_interface(opt::GCMAESOpt) = true
# ... and its cache can be re-initialized in place via `reinit!`.
SciMLBase.has_reinit(cache::GCMAESOptimizationCache) = true
"""
    SciMLBase.reinit!(cache::GCMAESOptimizationCache; p = missing, u0 = missing)

Replace the parameters `p` and/or the initial point `u0` stored in `cache` and
return the mutated cache.  Omitted keywords keep their current values.
Symbolic maps (non-empty collections with `Pair` eltype) are only accepted when
the underlying function has a symbolic origin (`cache.f.sys`).
"""
function SciMLBase.reinit!(cache::GCMAESOptimizationCache; p = missing, u0 = missing)
    if p === missing && u0 === missing
        p, u0 = cache.p, cache.u0
    else # at least one of them has a value
        if p === missing
            p = cache.p
        end
        if u0 === missing
            u0 = cache.u0
        end
        if (eltype(p) <: Pair && !isempty(p)) || (eltype(u0) <: Pair && !isempty(u0)) # one is a non-empty symbolic map
            # Symbolic maps can only be resolved against a symbolic system:
            # `sys.ps` for parameters, `sys.states` for states.
            hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :ps) ||
                throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." *
                                    " Please use `remake` with the `p` keyword argument as a vector of values, paying attention to parameter order."))
            hasproperty(cache.f, :sys) && hasfield(typeof(cache.f.sys), :states) ||
                throw(ArgumentError("This cache does not support symbolic maps with `remake`, i.e. it does not have a symbolic origin." *
                                    " Please use `remake` with the `u0` keyword argument as a vector of values, paying attention to state order."))
            # Convert the symbolic maps into plain value vectors in canonical order.
            p, u0 = SciMLBase.process_p_u0_symbolic(cache, p, u0)
        end
    end

    cache.reinit_cache.p = p
    cache.reinit_cache.u0 = u0

    return cache
end

function __map_optimizer_args(cache::GCMAESOptimizationCache, opt::GCMAESOpt;
callback = nothing,
maxiters::Union{Number, Nothing} = nothing,
maxtime::Union{Number, Nothing} = nothing,
Expand Down Expand Up @@ -40,50 +98,56 @@ function __map_optimizer_args(prob::OptimizationProblem, opt::GCMAESOpt;
return mapped_args
end

function SciMLBase.__solve(prob::OptimizationProblem, opt::GCMAESOpt;
maxiters::Union{Number, Nothing} = nothing,
maxtime::Union{Number, Nothing} = nothing,
abstol::Union{Number, Nothing} = nothing,
reltol::Union{Number, Nothing} = nothing,
progress = false,
σ0 = 0.2,
kwargs...)
local x
local G = similar(prob.u0)

function SciMLBase.__init(prob::OptimizationProblem, opt::GCMAESOpt;
        maxiters::Union{Number, Nothing} = nothing,
        maxtime::Union{Number, Nothing} = nothing,
        abstol::Union{Number, Nothing} = nothing,
        reltol::Union{Number, Nothing} = nothing,
        progress = false,
        σ0 = 0.2,
        kwargs...)
    # Normalize the iteration/time limits once, so the cache stores ready-to-use values.
    iter_limit = Optimization._check_and_convert_maxiters(maxiters)
    time_limit = Optimization._check_and_convert_maxtime(maxtime)
    return GCMAESOptimizationCache(prob, opt; maxiters = iter_limit,
                                   maxtime = time_limit, abstol, reltol, progress,
                                   sigma0 = σ0, kwargs...)
end

f = Optimization.instantiate_function(prob.f, prob.u0, prob.f.adtype, prob.p)
function SciMLBase.__solve(cache::GCMAESOptimizationCache)
local x
local G = similar(cache.u0)

_loss = function (θ)
x = f.f(θ, prob.p)
x = cache.f.f(θ, cache.p)
return x[1]
end

if !isnothing(f.grad)
if !isnothing(cache.f.grad)
g = function (θ)
f.grad(G, θ)
cache.f.grad(G, θ)
return G
end
end

opt_args = __map_optimizer_args(prob, opt, maxiters = maxiters, maxtime = maxtime,
abstol = abstol, reltol = reltol; kwargs...)
opt_args = __map_optimizer_args(cache, cache.opt, maxiters = cache.solver_args.maxiters,
maxtime = cache.solver_args.maxtime,
abstol = cache.solver_args.abstol,
reltol = cache.solver_args.reltol; cache.solver_args...)

t0 = time()
if prob.sense === Optimization.MaxSense
opt_xmin, opt_fmin, opt_ret = GCMAES.maximize(isnothing(f.grad) ? _loss :
(_loss, g), prob.u0, σ0, prob.lb,
prob.ub; opt_args...)
if cache.sense === Optimization.MaxSense
opt_xmin, opt_fmin, opt_ret = GCMAES.maximize(isnothing(cache.f.grad) ? _loss :
(_loss, g), cache.u0,
cache.sigma0, cache.lb,
cache.ub; opt_args...)
else
opt_xmin, opt_fmin, opt_ret = GCMAES.minimize(isnothing(f.grad) ? _loss :
(_loss, g), prob.u0, σ0, prob.lb,
prob.ub; opt_args...)
opt_xmin, opt_fmin, opt_ret = GCMAES.minimize(isnothing(cache.f.grad) ? _loss :
(_loss, g), cache.u0,
cache.sigma0, cache.lb,
cache.ub; opt_args...)
end
t1 = time()

SciMLBase.build_solution(SciMLBase.DefaultOptimizationCache(prob.f, prob.p), opt,
SciMLBase.build_solution(cache, cache.opt,
opt_xmin, opt_fmin; retcode = Symbol(Bool(opt_ret)),
solve_time = t1 - t0)
end
Expand Down
15 changes: 15 additions & 0 deletions lib/OptimizationGCMAES/test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,19 @@ using Test
ub = [1.0, 1.0])
sol = solve(prob, GCMAESOpt(), maxiters = 1000)
@test 10 * sol.objective < l1

@testset "cache" begin
    # Minimize (p[1] - x[1])^2 on [-10, 10]: the optimum is x == p, which lets
    # us check both the initial solve and a solve after `reinit!`.
    objective(x, p) = (p[1] - x[1])^2
    x0 = zeros(1)
    p = [1.0]

    prob = OptimizationProblem(objective, x0, p, lb = [-10.0], ub = [10.0])
    cache = Optimization.init(prob, GCMAESOpt())
    sol = Optimization.solve!(cache)
    # `@test sol.u[1.0] atol=...` was invalid: `atol` only applies to an `≈` comparison.
    @test sol.u ≈ [1.0] atol=1e-3

    cache = Optimization.reinit!(cache; p = [2.0])
    sol = Optimization.solve!(cache)
    @test sol.u ≈ [2.0] atol=1e-3
end
end
4 changes: 3 additions & 1 deletion lib/OptimizationMOI/Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "OptimizationMOI"
uuid = "fd9f6733-72f4-499f-8506-86b2bdd0dea1"
authors = ["Vaibhav Dixit <[email protected]> and contributors"]
version = "0.1.8"
version = "0.1.11"

[deps]
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Expand All @@ -10,8 +10,10 @@ Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"

[compat]
Ipopt_jll = "=300.1400.400"
MathOptInterface = "1"
Juniper = "0.9"
Optimization = "3.9"
Expand Down
Loading

0 comments on commit 5f4b05e

Please sign in to comment.