Commit

Format

Vaibhavdixit02 committed Dec 26, 2023
1 parent 9eacb81 commit 6687491

Showing 33 changed files with 594 additions and 585 deletions.
1 change: 1 addition & 0 deletions docs/src/API/optimization_function.md
@@ -15,6 +15,7 @@ The choices for the auto-AD fill-ins with quick descriptions are:
   - `AutoFiniteDiff()`: Finite differencing, not optimal but always applicable
   - `AutoModelingToolkit()`: The fastest choice for large scalar optimizations
   - `AutoEnzyme()`: Highly performant AD choice for type stable and optimized code
+
 ## Automatic Differentiation Choice API
 
 The following sections describe the Auto-AD choices in detail.
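The `adtype` documented above is passed as the second argument of `OptimizationFunction`. A minimal usage sketch (illustrative, not part of this diff; the Rosenbrock objective and BFGS solver are assumptions):

```julia
using Optimization, OptimizationOptimJL

# The chosen AD backend fills in gradient/Hessian callbacks automatically.
rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2

optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])
sol = solve(prob, BFGS())  # derivatives come from the AD choice above
```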
3 changes: 2 additions & 1 deletion docs/src/optimization_packages/polyopt.md
@@ -1,6 +1,6 @@
 # OptimizationPolyalgorithms.jl
 
-OptimizationPolyalgorithms.jl is a package for collecting polyalgorithms formed by fusing popular optimization solvers of different characteristics. 
+OptimizationPolyalgorithms.jl is a package for collecting polyalgorithms formed by fusing popular optimization solvers of different characteristics.
 
 ## Installation: OptimizationPolyalgorithms
 
@@ -10,6 +10,7 @@ To use this package, install the OptimizationPolyalgorithms package:
 import Pkg;
 Pkg.add("OptimizationPolyalgorithms");
 ```
+
 ## Algorithms
 
 Right now we support the following polyalgorithms.
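As a usage sketch for the page being edited (illustrative, not part of this diff; `PolyOpt` is the package's exported polyalgorithm, and the AD choice here is an assumption):

```julia
using Optimization, OptimizationPolyalgorithms

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])

# PolyOpt chains a first-order run into a quasi-Newton polish.
sol = solve(prob, PolyOpt(), maxiters = 1000)
```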
10 changes: 5 additions & 5 deletions ext/OptimizationEnzymeExt.jl
@@ -16,8 +16,8 @@ isdefined(Base, :get_extension) ? (using Enzyme) : (using ..Enzyme)
 end
 
 function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
-                                           adtype::AutoEnzyme, p,
-                                           num_cons = 0)
+        adtype::AutoEnzyme, p,
+        num_cons = 0)
     if f.grad === nothing
         grad = let
             function (res, θ, args...)
@@ -154,9 +154,9 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
 end
 
 function Optimization.instantiate_function(f::OptimizationFunction{true},
-                                           cache::Optimization.ReInitCache,
-                                           adtype::AutoEnzyme,
-                                           num_cons = 0)
+        cache::Optimization.ReInitCache,
+        adtype::AutoEnzyme,
+        num_cons = 0)
     p = cache.p
 
     if f.grad === nothing
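The `grad` closure being re-indented here is the fill-in built when `f.grad === nothing`. A standalone sketch of the reverse-mode call it wraps (illustrative; the real extension also threads the parameters `p` and extra `args...` through):

```julia
using Enzyme

f(θ) = sum(abs2, θ)

θ = [1.0, 2.0, 3.0]
dθ = zero(θ)  # shadow buffer; Enzyme accumulates the gradient into it
Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Active, Enzyme.Duplicated(θ, dθ))
dθ  # now holds ∇f(θ) = 2θ
```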
4 changes: 2 additions & 2 deletions ext/OptimizationFiniteDiffExt.jl
@@ -9,7 +9,7 @@ isdefined(Base, :get_extension) ? (using FiniteDiff) : (using ..FiniteDiff)
 const FD = FiniteDiff
 
 function Optimization.instantiate_function(f, x, adtype::AutoFiniteDiff, p,
-                                           num_cons = 0)
+        num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, p, args...))
     updatecache = (cache, x) -> (cache.xmm .= x; cache.xmp .= x; cache.xpm .= x; cache.xpp .= x; return cache)
 
@@ -117,7 +117,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoFiniteDiff, p,
 end
 
 function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
-                                           adtype::AutoFiniteDiff, num_cons = 0)
+        adtype::AutoFiniteDiff, num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, cache.p, args...))
     updatecache = (cache, x) -> (cache.xmm .= x; cache.xmp .= x; cache.xpm .= x; cache.xpp .= x; return cache)
 
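The `updatecache` helper above refreshes the stencil buffers (`xmm`, `xmp`, `xpm`, `xpp`) of a reusable finite-difference Hessian cache. A standalone sketch of the cached Hessian call this pattern supports (illustrative, not the extension's exact code):

```julia
using FiniteDiff

f(θ) = sum(abs2, θ)

θ = [1.0, 2.0, 3.0]
H = zeros(3, 3)
hcache = FiniteDiff.HessianCache(θ)  # holds the xmm/xmp/xpm/xpp stencil points
FiniteDiff.finite_difference_hessian!(H, f, θ, hcache)
H  # ≈ 2I
```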
10 changes: 5 additions & 5 deletions ext/OptimizationForwardDiffExt.jl
@@ -14,8 +14,8 @@ function default_chunk_size(len)
 end
 
 function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
-                                           adtype::AutoForwardDiff{_chunksize}, p,
-                                           num_cons = 0) where {_chunksize}
+        adtype::AutoForwardDiff{_chunksize}, p,
+        num_cons = 0) where {_chunksize}
     chunksize = _chunksize === nothing ? default_chunk_size(length(x)) : _chunksize
 
     _f = (θ, args...) -> first(f.f(θ, p, args...))
@@ -90,9 +90,9 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
 end
 
 function Optimization.instantiate_function(f::OptimizationFunction{true},
-                                           cache::Optimization.ReInitCache,
-                                           adtype::AutoForwardDiff{_chunksize},
-                                           num_cons = 0) where {_chunksize}
+        cache::Optimization.ReInitCache,
+        adtype::AutoForwardDiff{_chunksize},
+        num_cons = 0) where {_chunksize}
     chunksize = _chunksize === nothing ? default_chunk_size(length(cache.u0)) : _chunksize
 
     _f = (θ, args...) -> first(f.f(θ, cache.p, args...))
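`default_chunk_size` picks how many partial derivatives ForwardDiff propagates per sweep when the user did not fix `_chunksize`. A standalone sketch of explicit chunking (illustrative):

```julia
using ForwardDiff

f(θ) = sum(abs2, θ)

θ = rand(10)
# One config per input size; Chunk{4} evaluates 4 partials per forward pass.
cfg = ForwardDiff.GradientConfig(f, θ, ForwardDiff.Chunk{4}())
res = similar(θ)
ForwardDiff.gradient!(res, f, θ, cfg)
```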
4 changes: 2 additions & 2 deletions ext/OptimizationMTKExt.jl
@@ -7,7 +7,7 @@ import Optimization.ADTypes: AutoModelingToolkit
 isdefined(Base, :get_extension) ? (using ModelingToolkit) : (using ..ModelingToolkit)
 
 function Optimization.instantiate_function(f, x, adtype::AutoModelingToolkit, p,
-                                           num_cons = 0)
+        num_cons = 0)
     p = isnothing(p) ? SciMLBase.NullParameters() : p
 
     sys = ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
@@ -52,7 +52,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoModelingToolkit, p,
 end
 
 function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
-                                           adtype::AutoModelingToolkit, num_cons = 0)
+        adtype::AutoModelingToolkit, num_cons = 0)
     p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
 
     sys = ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0, cache.p;
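`modelingtoolkitize` retraces the numeric problem symbolically, which is what lets this backend emit analytic gradients, Hessians, and sparse constraint Jacobians. A standalone sketch of that round-trip (illustrative):

```julia
using ModelingToolkit, Optimization

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
prob = OptimizationProblem(rosenbrock, zeros(2), [1.0, 100.0])

# Symbolic round-trip: numeric OptimizationProblem -> OptimizationSystem.
sys = ModelingToolkit.modelingtoolkitize(prob)
```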
6 changes: 3 additions & 3 deletions ext/OptimizationReverseDiffExt.jl
@@ -18,8 +18,8 @@ function default_chunk_size(len)
 end
 
 function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
-                                           p = SciMLBase.NullParameters(),
-                                           num_cons = 0)
+        p = SciMLBase.NullParameters(),
+        num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, p, args...))
 
     chunksize = default_chunk_size(length(x))
@@ -151,7 +151,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
 end
 
 function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
-                                           adtype::AutoReverseDiff, num_cons = 0)
+        adtype::AutoReverseDiff, num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, cache.p, args...))
 
     chunksize = default_chunk_size(length(cache.u0))
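The chunk size here feeds ReverseDiff's taped mode: record the computation once, then replay the compiled tape for each gradient. A standalone sketch (illustrative):

```julia
using ReverseDiff

f(θ) = sum(abs2, θ)

θ = rand(5)
# Record once, compile for fast replay; valid as long as the control flow
# of f does not depend on the values of θ.
tape = ReverseDiff.compile(ReverseDiff.GradientTape(f, θ))
res = similar(θ)
ReverseDiff.gradient!(res, tape, θ)
```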
20 changes: 10 additions & 10 deletions ext/OptimizationSparseDiffExt.jl
@@ -20,8 +20,8 @@ function default_chunk_size(len)
 end
 
 function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
-                                           adtype::AutoSparseForwardDiff{_chunksize}, p,
-                                           num_cons = 0) where {_chunksize}
+        adtype::AutoSparseForwardDiff{_chunksize}, p,
+        num_cons = 0) where {_chunksize}
     if maximum(getfield.(methods(f.f), :nargs)) > 3
         error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
     end
@@ -127,9 +127,9 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
 end
 
 function Optimization.instantiate_function(f::OptimizationFunction{true},
-                                           cache::Optimization.ReInitCache,
-                                           adtype::AutoSparseForwardDiff{_chunksize},
-                                           num_cons = 0) where {_chunksize}
+        cache::Optimization.ReInitCache,
+        adtype::AutoSparseForwardDiff{_chunksize},
+        num_cons = 0) where {_chunksize}
     if maximum(getfield.(methods(f.f), :nargs)) > 3
         error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
     end
@@ -237,7 +237,7 @@ end
 const FD = FiniteDiff
 
 function Optimization.instantiate_function(f, x, adtype::AutoSparseFiniteDiff, p,
-                                           num_cons = 0)
+        num_cons = 0)
     if maximum(getfield.(methods(f.f), :nargs)) > 3
         error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
     end
@@ -362,7 +362,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseFiniteDiff, p
 end
 
 function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
-                                           adtype::AutoSparseFiniteDiff, num_cons = 0)
+        adtype::AutoSparseFiniteDiff, num_cons = 0)
     if maximum(getfield.(methods(f.f), :nargs)) > 3
         error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
     end
@@ -493,8 +493,8 @@ end
 struct OptimizationSparseReverseTag end
 
 function Optimization.instantiate_function(f, x, adtype::AutoSparseReverseDiff,
-                                           p = SciMLBase.NullParameters(),
-                                           num_cons = 0)
+        p = SciMLBase.NullParameters(),
+        num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, p, args...))
 
     chunksize = default_chunk_size(length(x))
@@ -668,7 +668,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseReverseDiff,
 end
 
 function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
-                                           adtype::AutoSparseReverseDiff, num_cons = 0)
+        adtype::AutoSparseReverseDiff, num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, cache.p, args...))
 
     chunksize = default_chunk_size(length(cache.u0))
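The repeated `nargs` guard works because `methods(f.f)` counts the function itself as an argument, so `> 3` means more than the two user-visible arguments (`θ` and `p`). The backend itself rests on sparsity detection plus matrix coloring; a standalone sketch of that pipeline (illustrative, using an in-place residual function):

```julia
using SparseDiffTools, Symbolics, SparseArrays

function g!(y, x)
    y[1] = x[1]^2
    y[2] = x[2]^2
    y[3] = x[1] * x[2]
    return nothing
end

x = rand(3)
y = zeros(3)
pattern = Symbolics.jacobian_sparsity(g!, y, x)  # Bool sparsity pattern
colors = matrix_colors(pattern)                  # column coloring
J = Float64.(pattern)                            # sparse storage, same pattern
forwarddiff_color_jacobian!(J, g!, x; colorvec = colors, sparsity = pattern)
```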
4 changes: 2 additions & 2 deletions ext/OptimizationTrackerExt.jl
@@ -6,7 +6,7 @@ import Optimization.ADTypes: AutoTracker
 isdefined(Base, :get_extension) ? (using Tracker) : (using ..Tracker)
 
 function Optimization.instantiate_function(f, x, adtype::AutoTracker, p,
-                                           num_cons = 0)
+        num_cons = 0)
     num_cons != 0 && error("AutoTracker does not currently support constraints")
     _f = (θ, args...) -> first(f.f(θ, p, args...))
 
@@ -37,7 +37,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoTracker, p,
 end
 
 function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
-                                           adtype::AutoTracker, num_cons = 0)
+        adtype::AutoTracker, num_cons = 0)
     num_cons != 0 && error("AutoTracker does not currently support constraints")
     _f = (θ, args...) -> first(f.f(θ, cache.p, args...))
 
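A standalone sketch of the Tracker call such a gradient fill-in wraps (illustrative): Tracker returns tracked arrays, so results are unwrapped with `Tracker.data`.

```julia
using Tracker

f(θ) = sum(abs2, θ)

θ = rand(3)
g = Tracker.data(Tracker.gradient(f, θ)[1])  # plain Vector{Float64}
```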
4 changes: 2 additions & 2 deletions ext/OptimizationZygoteExt.jl
@@ -7,7 +7,7 @@ isdefined(Base, :get_extension) ? (using Zygote, Zygote.ForwardDiff) :
 (using ..Zygote, ..Zygote.ForwardDiff)
 
 function Optimization.instantiate_function(f, x, adtype::AutoZygote, p,
-                                           num_cons = 0)
+        num_cons = 0)
     _f = (θ, args...) -> f(θ, p, args...)[1]
     if f.grad === nothing
         grad = function (res, θ, args...)
@@ -84,7 +84,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoZygote, p,
 end
 
 function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
-                                           adtype::AutoZygote, num_cons = 0)
+        adtype::AutoZygote, num_cons = 0)
     _f = (θ, args...) -> f(θ, cache.p, args...)[1]
     if f.grad === nothing
         grad = function (res, θ, args...)
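A standalone sketch of the reverse-mode call the `grad` fill-in wraps (illustrative; the extension pairs Zygote with ForwardDiff, imported above, for forward-over-reverse Hessians):

```julia
using Zygote

f(θ) = sum(abs2, θ)

θ = rand(3)
res = similar(θ)
res .= Zygote.gradient(f, θ)[1]  # copy into the caller's result buffer
```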
60 changes: 30 additions & 30 deletions lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -37,13 +37,13 @@ function decompose_trace(opt::BlackBoxOptim.OptRunController, progress)
 end
 
 function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO;
-                              callback = nothing,
-                              maxiters::Union{Number, Nothing} = nothing,
-                              maxtime::Union{Number, Nothing} = nothing,
-                              abstol::Union{Number, Nothing} = nothing,
-                              reltol::Union{Number, Nothing} = nothing,
-                              verbose::Bool = false,
-                              kwargs...)
+        callback = nothing,
+        maxiters::Union{Number, Nothing} = nothing,
+        maxtime::Union{Number, Nothing} = nothing,
+        abstol::Union{Number, Nothing} = nothing,
+        reltol::Union{Number, Nothing} = nothing,
+        verbose::Bool = false,
+        kwargs...)
     if !isnothing(reltol)
         @warn "common reltol is currently not used by $(opt)"
     end
@@ -78,30 +78,30 @@ end
 end
 
 function SciMLBase.__solve(cache::Optimization.OptimizationCache{
-                                                                 F,
-                                                                 RC,
-                                                                 LB,
-                                                                 UB,
-                                                                 LC,
-                                                                 UC,
-                                                                 S,
-                                                                 O,
-                                                                 D,
-                                                                 P,
-                                                                 C,
+        F,
+        RC,
+        LB,
+        UB,
+        LC,
+        UC,
+        S,
+        O,
+        D,
+        P,
+        C,
 }) where {
-          F,
-          RC,
-          LB,
-          UB,
-          LC,
-          UC,
-          S,
-          O <:
-          BBO,
-          D,
-          P,
-          C,
+        F,
+        RC,
+        LB,
+        UB,
+        LC,
+        UC,
+        S,
+        O <:
+        BBO,
+        D,
+        P,
+        C,
 }
     local x, cur, state
 
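`__map_optimizer_args` translates the common keywords (`maxiters`, `maxtime`, `abstol`; `reltol` only triggers the `@warn` above) into BlackBoxOptim's own names before calling into the solver. A direct BlackBoxOptim sketch (illustrative; the keyword mapping in the comment is paraphrased, not quoted from the wrapper):

```julia
using BlackBoxOptim

f(x) = sum(abs2, x)

# Roughly: maxiters -> MaxSteps, maxtime -> MaxTime.
res = bboptimize(f;
    SearchRange = (-5.0, 5.0),
    NumDimensions = 2,
    MaxSteps = 10_000,
    TraceMode = :silent)

best_candidate(res), best_fitness(res)
```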
lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
@@ -13,11 +13,11 @@ SciMLBase.allowscallback(::CMAEvolutionStrategyOpt) = false #looks like `logger`
 SciMLBase.supports_opt_cache_interface(opt::CMAEvolutionStrategyOpt) = true
 
 function __map_optimizer_args(prob::OptimizationCache, opt::CMAEvolutionStrategyOpt;
-                              callback = nothing,
-                              maxiters::Union{Number, Nothing} = nothing,
-                              maxtime::Union{Number, Nothing} = nothing,
-                              abstol::Union{Number, Nothing} = nothing,
-                              reltol::Union{Number, Nothing} = nothing)
+        callback = nothing,
+        maxiters::Union{Number, Nothing} = nothing,
+        maxtime::Union{Number, Nothing} = nothing,
+        abstol::Union{Number, Nothing} = nothing,
+        reltol::Union{Number, Nothing} = nothing)
     if !isnothing(reltol)
         @warn "common reltol is currently not used by $(opt)"
     end
@@ -41,30 +41,30 @@ function __map_optimizer_args(prob::OptimizationCache, opt::CMAEvolutionStrategy
 end
 
 function SciMLBase.__solve(cache::OptimizationCache{
-                                                    F,
-                                                    RC,
-                                                    LB,
-                                                    UB,
-                                                    LC,
-                                                    UC,
-                                                    S,
-                                                    O,
-                                                    D,
-                                                    P,
-                                                    C,
+        F,
+        RC,
+        LB,
+        UB,
+        LC,
+        UC,
+        S,
+        O,
+        D,
+        P,
+        C,
 }) where {
-          F,
-          RC,
-          LB,
-          UB,
-          LC,
-          UC,
-          S,
-          O <:
-          CMAEvolutionStrategyOpt,
-          D,
-          P,
-          C,
+        F,
+        RC,
+        LB,
+        UB,
+        LC,
+        UC,
+        S,
+        O <:
+        CMAEvolutionStrategyOpt,
+        D,
+        P,
+        C,
 }
     local x, cur, state
 
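A standalone sketch of the underlying optimizer this wrapper maps onto (illustrative; `minimize`, `xbest`, and `fbest` are CMAEvolutionStrategy.jl's API, and the bounds, step size, and `verbosity` setting here are assumptions):

```julia
using CMAEvolutionStrategy

f(x) = sum(abs2, x)

# x0 = zeros(2) with initial step size sigma0 = 0.5; the package's own logger,
# noted in the allowscallback comment above, is why callbacks are disabled.
result = minimize(f, zeros(2), 0.5;
    lower = fill(-5.0, 2),
    upper = fill(5.0, 2),
    verbosity = 0)

xbest(result), fbest(result)
```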