Merge pull request #715 from ParasPuneetSingh/master
Update #711: check whether an OptimizationFunction is used for derivative-based optimizers
Vaibhavdixit02 authored Apr 7, 2024
2 parents f7f6acb + e0610cd commit 8320ff3
Showing 14 changed files with 59 additions and 3 deletions.
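
This commit adds SciMLBase.requiresgradient, requireshessian, requiresconsjac, and requiresconshess trait methods to the solver wrappers, so derivative-based optimizers can be rejected early when the objective is not an OptimizationFunction that carries (or can generate) the needed derivatives. A minimal sketch of the idea; the helper name check_derivative_support is hypothetical and for illustration only, the real validation lives in the SciMLBase/Optimization.jl internals:

using SciMLBase

# Hypothetical helper, for illustration only: reject a derivative-based
# optimizer unless the user supplied an OptimizationFunction that carries
# or can generate the required gradient.
function check_derivative_support(f, opt)
    if SciMLBase.requiresgradient(opt) && !(f isa SciMLBase.OptimizationFunction)
        error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")
    end
end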
2 changes: 1 addition & 1 deletion Project.toml
@@ -34,7 +34,7 @@ Pkg = "1"
 Printf = "1.10"
 ProgressLogging = "0.1"
 Reexport = "1.2"
-SciMLBase = "2.23.0"
+SciMLBase = "2.30.0"
 SparseArrays = "1.10"
 Symbolics = "5.12"
 TerminalLoggers = "0.1"
2 changes: 1 addition & 1 deletion docs/Project.toml
@@ -66,7 +66,7 @@ OptimizationPolyalgorithms = "0.1, 0.2"
 OptimizationSpeedMapping = "0.1, 0.2"
 OrdinaryDiffEq = "6"
 ReverseDiff = ">= 1.9.0"
-SciMLBase = "2"
+SciMLBase = "2.30.0"
 SciMLSensitivity = "7"
 Tracker = ">= 0.2"
 Zygote = ">= 0.5"
2 changes: 2 additions & 0 deletions lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -10,6 +10,8 @@ SciMLBase.requiresbounds(::BBO) = true
 SciMLBase.allowsbounds(::BBO) = true
 SciMLBase.supports_opt_cache_interface(opt::BBO) = true
 
+
+
 for j in string.(BlackBoxOptim.SingleObjectiveMethodNames)
     eval(Meta.parse("Base.@kwdef struct BBO_" * j * " <: BBO method=:" * j * " end"))
     eval(Meta.parse("export BBO_" * j))
4 changes: 4 additions & 0 deletions lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
@@ -10,6 +10,10 @@ struct CMAEvolutionStrategyOpt end

 SciMLBase.allowsbounds(::CMAEvolutionStrategyOpt) = true
 SciMLBase.supports_opt_cache_interface(opt::CMAEvolutionStrategyOpt) = true
+SciMLBase.requiresgradient(::CMAEvolutionStrategyOpt) = false
+SciMLBase.requireshessian(::CMAEvolutionStrategyOpt) = false
+SciMLBase.requiresconsjac(::CMAEvolutionStrategyOpt) = false
+SciMLBase.requiresconshess(::CMAEvolutionStrategyOpt) = false
 
 function __map_optimizer_args(prob::OptimizationCache, opt::CMAEvolutionStrategyOpt;
         callback = nothing,
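
The traits can be queried directly; for a derivative-free method such as CMA-ES they all return false, so a plain objective with no AD backend is acceptable. A quick REPL sketch, assuming CMAEvolutionStrategyOpt is exported as in this module:

using SciMLBase, OptimizationCMAEvolutionStrategy

SciMLBase.requiresgradient(CMAEvolutionStrategyOpt())  # false
SciMLBase.requireshessian(CMAEvolutionStrategyOpt())   # false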
4 changes: 4 additions & 0 deletions lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
@@ -7,6 +7,10 @@ using Optimization.SciMLBase
 SciMLBase.allowsbounds(opt::Evolutionary.AbstractOptimizer) = true
 SciMLBase.allowsconstraints(opt::Evolutionary.AbstractOptimizer) = true
 SciMLBase.supports_opt_cache_interface(opt::Evolutionary.AbstractOptimizer) = true
+SciMLBase.requiresgradient(opt::Evolutionary.AbstractOptimizer) = false
+SciMLBase.requireshessian(opt::Evolutionary.AbstractOptimizer) = false
+SciMLBase.requiresconsjac(opt::Evolutionary.AbstractOptimizer) = false
+SciMLBase.requiresconshess(opt::Evolutionary.AbstractOptimizer) = false
 
 decompose_trace(trace::Evolutionary.OptimizationTrace) = last(trace)
 decompose_trace(trace::Evolutionary.OptimizationTraceRecord) = trace
4 changes: 4 additions & 0 deletions lib/OptimizationFlux/src/OptimizationFlux.jl
@@ -5,6 +5,10 @@ using Reexport, Printf, ProgressLogging
 using Optimization.SciMLBase
 
 SciMLBase.supports_opt_cache_interface(opt::Flux.Optimise.AbstractOptimiser) = true
+SciMLBase.requiresgradient(opt::Flux.Optimise.AbstractOptimiser) = true
+SciMLBase.requireshessian(opt::Flux.Optimise.AbstractOptimiser) = false
+SciMLBase.requiresconsjac(opt::Flux.Optimise.AbstractOptimiser) = false
+SciMLBase.requiresconshess(opt::Flux.Optimise.AbstractOptimiser) = false
 
 function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
         opt::Flux.Optimise.AbstractOptimiser,
5 changes: 5 additions & 0 deletions lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
@@ -12,6 +12,11 @@ SciMLBase.requiresbounds(::GCMAESOpt) = true
 SciMLBase.allowsbounds(::GCMAESOpt) = true
 SciMLBase.allowscallback(::GCMAESOpt) = false
 SciMLBase.supports_opt_cache_interface(opt::GCMAESOpt) = true
+SciMLBase.requiresgradient(::GCMAESOpt) = true
+SciMLBase.requireshessian(::GCMAESOpt) = false
+SciMLBase.requiresconsjac(::GCMAESOpt) = false
+SciMLBase.requiresconshess(::GCMAESOpt) = false
+
 
 function __map_optimizer_args(cache::OptimizationCache, opt::GCMAESOpt;
         callback = nothing,
5 changes: 5 additions & 0 deletions lib/OptimizationMOI/src/OptimizationMOI.jl
@@ -16,6 +16,11 @@ const MOI = MathOptInterface

 const DenseOrSparse{T} = Union{Matrix{T}, SparseMatrixCSC{T}}
 
+SciMLBase.requiresgradient(opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes}) = true
+SciMLBase.requireshessian(opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes}) = true
+SciMLBase.requiresconsjac(opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes}) = true
+SciMLBase.requiresconshess(opt::Union{MOI.AbstractOptimizer, MOI.OptimizerWithAttributes}) = true
+
 function SciMLBase.allowsbounds(opt::Union{MOI.AbstractOptimizer,
         MOI.OptimizerWithAttributes})
     true
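
MOI-backed solvers such as Ipopt consume gradients, Hessians, and constraint Jacobians, so the problem must be built from an OptimizationFunction with an AD backend. A sketch under those assumptions; the solver choice and parameter values are illustrative:

using Optimization, OptimizationMOI, Ipopt, ForwardDiff

rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
# AutoForwardDiff generates the gradient and Hessian the solver requires.
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])
sol = solve(prob, Ipopt.Optimizer())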
19 changes: 19 additions & 0 deletions lib/OptimizationNLopt/src/OptimizationNLopt.jl
@@ -9,6 +9,25 @@ using Optimization.SciMLBase
 SciMLBase.allowsbounds(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
 SciMLBase.supports_opt_cache_interface(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
 
+# NLopt algorithm names encode the method class (see
+# https://github.com/JuliaOpt/NLopt.jl/blob/master/src/NLopt.jl#L18C7-L18C16):
+# the second character is 'D' for derivative-based algorithms and 'N' for
+# derivative-free ones. The comparisons use Char literals; comparing a Char
+# against the String "D" would always return false.
+function SciMLBase.requiresgradient(opt::NLopt.Algorithm)
+    str_opt = string(opt)
+    return str_opt[2] == 'D'
+end
+
+function SciMLBase.requireshessian(opt::NLopt.Algorithm)
+    str_opt = string(opt)
+    return str_opt[2] == 'D' && str_opt[4] == 'N'
+end
+
+function SciMLBase.requiresconsjac(opt::NLopt.Algorithm)
+    str_opt = string(opt)
+    return str_opt[3] == 'O' || str_opt[3] == 'I' || str_opt[5] == 'G'
+end
 
 function __map_optimizer_args!(cache::OptimizationCache, opt::NLopt.Opt;
         callback = nothing,
         maxiters::Union{Number, Nothing} = nothing,
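
Because these traits key off the algorithm's name string, they can be sanity-checked at the REPL. A sketch, assuming the enum-style NLopt.Algorithm constants such as NLopt.LD_LBFGS:

using SciMLBase, NLopt, OptimizationNLopt

string(NLopt.LD_LBFGS)                           # "LD_LBFGS": second char is 'D'
SciMLBase.requiresgradient(NLopt.LD_LBFGS)       # true, derivative-based
SciMLBase.requiresgradient(NLopt.LN_NELDERMEAD)  # false, derivative-free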
8 changes: 7 additions & 1 deletion lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
@@ -14,6 +14,13 @@ SciMLBase.requiresbounds(opt::Optim.SAMIN) = true
 SciMLBase.supports_opt_cache_interface(opt::Optim.AbstractOptimizer) = true
 SciMLBase.supports_opt_cache_interface(opt::Union{Optim.Fminbox, Optim.SAMIN}) = true
 SciMLBase.supports_opt_cache_interface(opt::Optim.ConstrainedOptimizer) = true
+SciMLBase.requiresgradient(opt::Optim.AbstractOptimizer) = !(opt isa Optim.ZerothOrderOptimizer)
+SciMLBase.requiresgradient(::IPNewton) = true
+SciMLBase.requireshessian(::IPNewton) = true
+SciMLBase.requiresconsjac(::IPNewton) = true
+SciMLBase.requireshessian(opt::Optim.NewtonTrustRegion) = true
+SciMLBase.requireshessian(opt::Optim.Newton) = true
+SciMLBase.requiresgradient(opt::Optim.Fminbox) = true
 
 function __map_optimizer_args(cache::OptimizationCache,
         opt::Union{Optim.AbstractOptimizer, Optim.Fminbox,
@@ -128,7 +135,6 @@ function SciMLBase.__solve(cache::OptimizationCache{
     local x, cur, state
 
     cur, state = iterate(cache.data)
-
     !(cache.opt isa Optim.ZerothOrderOptimizer) && cache.f.grad === nothing &&
         error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")
 
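
With these traits, first-order Optim.jl methods reject a bare objective up front. A minimal sketch; the Rosenbrock function and starting point are illustrative:

using Optimization, OptimizationOptimJL, ForwardDiff

rosenbrock(x, p) = (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2

# A bare function carries no gradient, so a first-order method errors:
#   solve(OptimizationProblem(rosenbrock, zeros(2)), BFGS())
#   ERROR: Use OptimizationFunction to pass the derivatives or automatically
#   generate them with one of the autodiff backends

optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
sol = solve(OptimizationProblem(optf, zeros(2)), BFGS())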
1 change: 1 addition & 0 deletions lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
@@ -5,6 +5,7 @@ using Reexport, Printf, ProgressLogging
 using Optimization.SciMLBase
 
 SciMLBase.supports_opt_cache_interface(opt::AbstractRule) = true
+SciMLBase.requiresgradient(opt::AbstractRule) = true
 include("sophia.jl")
 
 function SciMLBase.__init(prob::SciMLBase.OptimizationProblem, opt::AbstractRule,
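
Every Optimisers.jl rule is gradient-based, so an AD backend is mandatory here as well. A sketch with Adam; the step size and iteration budget are illustrative:

using Optimization, OptimizationOptimisers, Optimisers, Zygote

loss(x, p) = sum(abs2, x .- p)
optf = OptimizationFunction(loss, Optimization.AutoZygote())
prob = OptimizationProblem(optf, zeros(3), [1.0, 2.0, 3.0])
sol = solve(prob, Optimisers.Adam(0.1), maxiters = 200)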
3 changes: 3 additions & 0 deletions lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
@@ -15,6 +15,9 @@ SciMLBase.supports_opt_cache_interface(::PRIMASolvers) = true
 SciMLBase.allowsconstraints(::Union{LINCOA, COBYLA}) = true
 SciMLBase.allowsbounds(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
 SciMLBase.requiresconstraints(opt::COBYLA) = true
+SciMLBase.requiresgradient(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
+SciMLBase.requiresconsjac(opt::Union{LINCOA, COBYLA}) = true
+
 
 function Optimization.OptimizationCache(prob::SciMLBase.OptimizationProblem,
         opt::PRIMASolvers, data;
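
COBYLA is the one PRIMA method that also requires constraints (requiresconstraints above), so the OptimizationFunction must define cons and the problem must supply constraint bounds. A sketch under those assumptions; the constraint and its bounds are illustrative, and COBYLA is assumed exported as in this module:

using Optimization, OptimizationPRIMA, ForwardDiff

objective(x, p) = (x[1] - 1.0)^2 + (x[2] - 2.0)^2
cons(res, x, p) = (res .= [x[1] + x[2]])  # one constraint: x[1] + x[2] <= 1
optf = OptimizationFunction(objective, Optimization.AutoForwardDiff(); cons = cons)
prob = OptimizationProblem(optf, zeros(2); lcons = [-Inf], ucons = [1.0])
sol = solve(prob, COBYLA())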
2 changes: 2 additions & 0 deletions lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl
@@ -6,6 +6,8 @@ using Optimization.SciMLBase, OptimizationOptimJL, OptimizationOptimisers

 struct PolyOpt end
 
+SciMLBase.requiresgradient(opt::PolyOpt) = true
+
 function SciMLBase.__solve(prob::OptimizationProblem,
         opt::PolyOpt,
         args...;
1 change: 1 addition & 0 deletions lib/OptimizationSpeedMapping/src/OptimizationSpeedMapping.jl
@@ -11,6 +11,7 @@ struct SpeedMappingOpt end
 SciMLBase.allowsbounds(::SpeedMappingOpt) = true
 SciMLBase.allowscallback(::SpeedMappingOpt) = false
 SciMLBase.supports_opt_cache_interface(opt::SpeedMappingOpt) = true
+SciMLBase.requiresgradient(opt::SpeedMappingOpt) = true
 
 function __map_optimizer_args(cache::OptimizationCache, opt::SpeedMappingOpt;
         callback = nothing,
