From c4d9714b1a731e4cd67ce9ee009c4c979bf714da Mon Sep 17 00:00:00 2001
From: Vaibhav Dixit
Date: Tue, 10 Sep 2024 16:18:08 -0400
Subject: [PATCH] remove data from all sub libs

---
 docs/src/index.md                                  |   1 -
 lib/OptimizationBBO/src/OptimizationBBO.jl         |  38 +-----
 .../src/OptimizationCMAEvolutionStrategy.jl        |   7 --
 .../src/OptimizationEvolutionary.jl                |  11 +-
 lib/OptimizationFlux/LICENSE                       |  21 ----
 lib/OptimizationFlux/Project.toml                  |  25 ----
 lib/OptimizationFlux/src/OptimizationFlux.jl       | 115 ------------------
 lib/OptimizationFlux/test/runtests.jl              |  46 -------
 .../src/OptimizationGCMAES.jl                      |   5 +-
 .../src/OptimizationManopt.jl                      |  16 +--
 .../src/OptimizationMetaheuristics.jl              |   5 +-
 .../src/OptimizationMultistartOptimization.jl      |   5 +-
 lib/OptimizationOptimJL/test/runtests.jl           |   4 +-
 .../src/OptimizationOptimisers.jl                  |   3 +-
 .../src/OptimizationPRIMA.jl                       |   4 +-
 15 files changed, 18 insertions(+), 288 deletions(-)
 delete mode 100644 lib/OptimizationFlux/LICENSE
 delete mode 100644 lib/OptimizationFlux/Project.toml
 delete mode 100644 lib/OptimizationFlux/src/OptimizationFlux.jl
 delete mode 100644 lib/OptimizationFlux/test/runtests.jl

diff --git a/docs/src/index.md b/docs/src/index.md
index 010aef6e4..a905e5439 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -200,7 +200,6 @@ Pkg.status() # hide
 using InteractiveUtils # hide
 versioninfo() # hide
 ```
-
 ```@raw html
 ```
 
diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl
index 1b0c6e48f..71de2fcd6 100644
--- a/lib/OptimizationBBO/src/OptimizationBBO.jl
+++ b/lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -111,12 +111,6 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
 }
     local x, cur, state
 
-    if cache.data != Optimization.DEFAULT_DATA
-        maxiters = length(cache.data)
-    end
-
-    cur, state = iterate(cache.data)
-
     function _cb(trace)
         if cache.callback === Optimization.DEFAULT_CALLBACK
             cb_call = false
@@ -138,9 +132,6 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
             BlackBoxOptim.shutdown_optimizer!(trace) #doesn't work
         end
 
-        if cache.data !== Optimization.DEFAULT_DATA
-            cur, state = iterate(cache.data, state)
-        end
         cb_call
     end
 
@@ -149,37 +140,14 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
 
     _loss = function (θ)
         if isa(cache.f, MultiObjectiveOptimizationFunction)
-            if cache.callback === Optimization.DEFAULT_CALLBACK &&
-               cache.data === Optimization.DEFAULT_DATA
-                return cache.f(θ, cache.p)
-            elseif cache.callback === Optimization.DEFAULT_CALLBACK
-                return cache.f(θ, cache.p, cur...)
-            elseif cache.data !== Optimization.DEFAULT_DATA
-                x = cache.f(θ, cache.p)
-                return x
-            else
-                x = cache.f(θ, cache.p, cur...)
-                return first(x)
-            end
+            return cache.f(θ, cache.p)
         else
-            if cache.callback === Optimization.DEFAULT_CALLBACK &&
-               cache.data === Optimization.DEFAULT_DATA
-                return first(cache.f(θ, cache.p))
-            elseif cache.callback === Optimization.DEFAULT_CALLBACK
-                return first(cache.f(θ, cache.p, cur...))
-            elseif cache.data !== Optimization.DEFAULT_DATA
-                x = cache.f(θ, cache.p)
-                return first(x)
-            else
-                x = cache.f(θ, cache.p, cur...)
-                return first(x)
-            end
+            return first(cache.f(θ, cache.p))
         end
     end
 
     opt_args = __map_optimizer_args(cache, cache.opt;
-        callback = cache.callback === Optimization.DEFAULT_CALLBACK &&
-                   cache.data === Optimization.DEFAULT_DATA ?
+        callback = cache.callback === Optimization.DEFAULT_CALLBACK ?
                    nothing : _cb,
         cache.solver_args...,
         maxiters = maxiters,
diff --git a/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl b/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
index 3fcc1cf1f..d7c49a14c 100644
--- a/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
+++ b/lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
@@ -74,12 +74,6 @@ function SciMLBase.__solve(cache::OptimizationCache{
 }
     local x, cur, state
 
-    if cache.data != Optimization.DEFAULT_DATA
-        maxiters = length(cache.data)
-    end
-
-    cur, state = iterate(cache.data)
-
     function _cb(opt, y, fvals, perm)
         curr_u = opt.logger.xbest[end]
         opt_state = Optimization.OptimizationState(; iter = length(opt.logger.fmedian),
@@ -91,7 +85,6 @@ function SciMLBase.__solve(cache::OptimizationCache{
         if !(cb_call isa Bool)
             error("The callback should return a boolean `halt` for whether to stop the optimization process.")
         end
-        cur, state = iterate(cache.data, state)
         cb_call
     end
 
diff --git a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
index eea090cdf..d491d2859 100644
--- a/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
+++ b/lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
@@ -99,12 +99,6 @@ function SciMLBase.__solve(cache::OptimizationCache{
 }
     local x, cur, state
 
-    if cache.data != Optimization.DEFAULT_DATA
-        maxiters = length(cache.data)
-    end
-
-    cur, state = iterate(cache.data)
-
     function _cb(trace)
         curr_u = decompose_trace(trace).metadata["curr_u"]
         opt_state = Optimization.OptimizationState(;
@@ -116,7 +110,6 @@ function SciMLBase.__solve(cache::OptimizationCache{
         if !(cb_call isa Bool)
             error("The callback should return a boolean `halt` for whether to stop the optimization process.")
         end
-        cur, state = iterate(cache.data, state)
         cb_call
     end
 
@@ -127,10 +120,10 @@ function SciMLBase.__solve(cache::OptimizationCache{
 
     _loss = function (θ)
         if isa(f, MultiObjectiveOptimizationFunction)
-            x = f(θ, cache.p, cur...)
+            x = f(θ, cache.p)
             return x
         else
-            x = f(θ, cache.p, cur...)
+            x = f(θ, cache.p)
             return first(x)
         end
     end
diff --git a/lib/OptimizationFlux/LICENSE b/lib/OptimizationFlux/LICENSE
deleted file mode 100644
index fd2b2d24a..000000000
--- a/lib/OptimizationFlux/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2023 Vaibhav Dixit and contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
\ No newline at end of file
diff --git a/lib/OptimizationFlux/Project.toml b/lib/OptimizationFlux/Project.toml
deleted file mode 100644
index 6353669f9..000000000
--- a/lib/OptimizationFlux/Project.toml
+++ /dev/null
@@ -1,25 +0,0 @@
-name = "OptimizationFlux"
-uuid = "253f991c-a7b2-45f8-8852-8b9a9df78a86"
-authors = ["Vaibhav Dixit and contributors"]
-version = "0.2.1"
-
-[deps]
-Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
-Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
-Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
-ProgressLogging = "33c8b6b6-d38a-422a-b730-caa89a2f386c"
-Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
-
-[compat]
-julia = "1"
-Flux = "0.13, 0.14"
-ProgressLogging = "0.1"
-Reexport = "1.2"
-Optimization = "3.21"
-
-[extras]
-ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
-Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
-[targets]
-test = ["ForwardDiff","Test"]
diff --git a/lib/OptimizationFlux/src/OptimizationFlux.jl b/lib/OptimizationFlux/src/OptimizationFlux.jl
deleted file mode 100644
index f81f5528e..000000000
--- a/lib/OptimizationFlux/src/OptimizationFlux.jl
+++ /dev/null
@@ -1,115 +0,0 @@
-module OptimizationFlux
-
-using Reexport, Printf, ProgressLogging
-@reexport using Flux, Optimization
-using Optimization.SciMLBase
-
-SciMLBase.supports_opt_cache_interface(opt::Flux.Optimise.AbstractOptimiser) = true
-SciMLBase.requiresgradient(opt::Flux.Optimise.AbstractOptimiser) = true
-SciMLBase.requireshessian(opt::Flux.Optimise.AbstractOptimiser) = false
-SciMLBase.requiresconsjac(opt::Flux.Optimise.AbstractOptimiser) = false
-SciMLBase.requiresconshess(opt::Flux.Optimise.AbstractOptimiser) = false
-
-function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
-        opt::Flux.Optimise.AbstractOptimiser,
-        data = Optimization.DEFAULT_DATA; save_best = true,
-        callback = (args...) -> (false),
-        progress = false, kwargs...)
-    return OptimizationCache(prob, opt, data; save_best, callback, progress,
-        kwargs...)
-end
-
-function SciMLBase.__solve(cache::OptimizationCache{
-        F,
-        RC,
-        LB,
-        UB,
-        LC,
-        UC,
-        S,
-        O,
-        D,
-        P,
-        C
-}) where {
-        F,
-        RC,
-        LB,
-        UB,
-        LC,
-        UC,
-        S,
-        O <:
-        Flux.Optimise.AbstractOptimiser,
-        D,
-        P,
-        C
-}
-    local i
-    if cache.data != Optimization.DEFAULT_DATA
-        maxiters = length(cache.data)
-        data = cache.data
-    else
-        maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
-        data = Optimization.take(cache.data, maxiters)
-    end
-
-    # Flux is silly and doesn't have an abstract type on its optimizers, so assume
-    # this is a Flux optimizer
-    θ = copy(cache.u0)
-    G = copy(θ)
-    opt = deepcopy(cache.opt)
-
-    local x, min_err, min_θ
-    min_err = typemax(eltype(cache.u0)) #dummy variables
-    min_opt = 1
-    min_θ = cache.u0
-
-    t0 = time()
-    Optimization.@withprogress cache.progress name="Training" begin
-        for (i, d) in enumerate(data)
-            cache.f.grad(G, θ, d...)
-            x = cache.f(θ, cache.p, d...)
-            opt_state = Optimization.OptimizationState(; iter = i,
-                u = θ,
-                objective = x[1],
-                original = opt)
-            cb_call = cache.callback(opt_state, x...)
-            if !(cb_call isa Bool)
-                error("The callback should return a boolean `halt` for whether to stop the optimization process. Please see the sciml_train documentation for information.")
-            elseif cb_call
-                break
-            end
-            msg = @sprintf("loss: %.3g", x[1])
-            cache.progress && ProgressLogging.@logprogress msg i/maxiters
-
-            if cache.solver_args.save_best
-                if first(x) < first(min_err) #found a better solution
-                    min_opt = opt
-                    min_err = x
-                    min_θ = copy(θ)
-                end
-                if i == maxiters #Last iter, revert to best.
-                    opt = min_opt
-                    x = min_err
-                    θ = min_θ
-                    opt_state = Optimization.OptimizationState(; iter = i,
-                        u = θ,
-                        objective = x[1],
-                        original = opt)
-                    cache.callback(opt_state, x...)
-                    break
-                end
-            end
-            Flux.update!(opt, θ, G)
-        end
-    end
-
-    t1 = time()
-    stats = Optimization.OptimizationStats(; iterations = maxiters,
-        time = t1 - t0, fevals = maxiters, gevals = maxiters)
-    SciMLBase.build_solution(cache, opt, θ, x[1], stats = stats)
-    # here should be build_solution to create the output message
-end
-
-end
diff --git a/lib/OptimizationFlux/test/runtests.jl b/lib/OptimizationFlux/test/runtests.jl
deleted file mode 100644
index bb91bd34f..000000000
--- a/lib/OptimizationFlux/test/runtests.jl
+++ /dev/null
@@ -1,46 +0,0 @@
-using OptimizationFlux, Optimization, ForwardDiff
-using Test
-
-@testset "OptimizationFlux.jl" begin
-    rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
-    x0 = zeros(2)
-    _p = [1.0, 100.0]
-    l1 = rosenbrock(x0, _p)
-
-    optprob = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
-
-    prob = OptimizationProblem(optprob, x0, _p)
-
-    sol = Optimization.solve(prob, Flux.Adam(0.1), maxiters = 1000)
-    @test 10 * sol.objective < l1
-
-    prob = OptimizationProblem(optprob, x0, _p)
-    sol = solve(prob, Flux.Adam(), maxiters = 1000, progress = false)
-    @test 10 * sol.objective < l1
-
-    @testset "cache" begin
-        objective(x, p) = (p[1] - x[1])^2
-        x0 = zeros(1)
-        p = [1.0]
-
-        prob = OptimizationProblem(
-            OptimizationFunction(objective,
-                Optimization.AutoForwardDiff()), x0,
-            p)
-        cache = Optimization.init(prob, Flux.Adam(0.1), maxiters = 1000)
-        sol = Optimization.solve!(cache)
-        @test sol.u≈[1.0] atol=1e-3
-
-        cache = Optimization.reinit!(cache; p = [2.0])
-        sol = Optimization.solve!(cache)
-        @test sol.u≈[2.0] atol=1e-3
-    end
-
-    function cb(state, args...)
-        if state.iter % 10 == 0
-            println(state.u)
-        end
-        return false
-    end
-    sol = solve(prob, Flux.Adam(0.1), callback = cb, maxiters = 100, progress = false)
-end
diff --git a/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl b/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
index 64004c515..88ef055eb 100644
--- a/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
+++ b/lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
@@ -48,11 +48,10 @@ function __map_optimizer_args(cache::OptimizationCache, opt::GCMAESOpt;
 end
 
 function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
-        opt::GCMAESOpt,
-        data = Optimization.DEFAULT_DATA; σ0 = 0.2,
+        opt::GCMAESOpt; σ0 = 0.2,
         callback = (args...) -> (false),
         progress = false, kwargs...)
-    return OptimizationCache(prob, opt, data; σ0 = σ0, callback = callback,
+    return OptimizationCache(prob, opt; σ0 = σ0, callback = callback,
         progress = progress, kwargs...)
 end
 
diff --git a/lib/OptimizationManopt/src/OptimizationManopt.jl b/lib/OptimizationManopt/src/OptimizationManopt.jl
index 3f34b4f66..b32dc1185 100644
--- a/lib/OptimizationManopt/src/OptimizationManopt.jl
+++ b/lib/OptimizationManopt/src/OptimizationManopt.jl
@@ -403,14 +403,6 @@ function SciMLBase.__solve(cache::OptimizationCache{
         throw(ArgumentError("Manifold not specified in the problem for e.g. `OptimizationProblem(f, x, p; manifold = SymmetricPositiveDefinite(5))`."))
     end
 
-    if cache.data !== Optimization.DEFAULT_DATA
-        maxiters = length(cache.data)
-    else
-        maxiters = cache.solver_args.maxiters
-    end
-
-    cur, state = iterate(cache.data)
-
     function _cb(x, θ)
         opt_state = Optimization.OptimizationState(iter = 0,
             u = θ,
@@ -419,13 +411,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
         if !(cb_call isa Bool)
             error("The callback should return a boolean `halt` for whether to stop the optimization process.")
         end
-        nx_itr = iterate(cache.data, state)
-        if isnothing(nx_itr)
-            true
-        else
-            cur, state = nx_itr
-            cb_call
-        end
+        cb_call
     end
     solver_kwarg = __map_optimizer_args!(cache, cache.opt, callback = _cb,
         maxiters = maxiters,
diff --git a/lib/OptimizationMetaheuristics/src/OptimizationMetaheuristics.jl b/lib/OptimizationMetaheuristics/src/OptimizationMetaheuristics.jl
index be4921240..fe7b345ab 100644
--- a/lib/OptimizationMetaheuristics/src/OptimizationMetaheuristics.jl
+++ b/lib/OptimizationMetaheuristics/src/OptimizationMetaheuristics.jl
@@ -66,11 +66,10 @@ function __map_optimizer_args!(cache::OptimizationCache,
 end
 
 function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
-        opt::Metaheuristics.AbstractAlgorithm,
-        data = Optimization.DEFAULT_DATA; use_initial = false,
+        opt::Metaheuristics.AbstractAlgorithm; use_initial = false,
         callback = (args...) -> (false),
         progress = false, kwargs...)
-    return OptimizationCache(prob, opt, data; use_initial = use_initial,
+    return OptimizationCache(prob, opt; use_initial = use_initial,
         callback = callback,
         progress = progress,
         kwargs...)
diff --git a/lib/OptimizationMultistartOptimization/src/OptimizationMultistartOptimization.jl b/lib/OptimizationMultistartOptimization/src/OptimizationMultistartOptimization.jl
index 39d0d6895..cdec88403 100644
--- a/lib/OptimizationMultistartOptimization/src/OptimizationMultistartOptimization.jl
+++ b/lib/OptimizationMultistartOptimization/src/OptimizationMultistartOptimization.jl
@@ -11,11 +11,10 @@ SciMLBase.supports_opt_cache_interface(opt::MultistartOptimization.TikTak) = tru
 
 function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
         opt::MultistartOptimization.TikTak,
-        local_opt,
-        data = Optimization.DEFAULT_DATA;
+        local_opt;
         use_threads = true,
         kwargs...)
-    return OptimizationCache(prob, opt, data; local_opt = local_opt, prob = prob,
+    return OptimizationCache(prob, opt; local_opt = local_opt, prob = prob,
         use_threads = use_threads,
         kwargs...)
 end
 
diff --git a/lib/OptimizationOptimJL/test/runtests.jl b/lib/OptimizationOptimJL/test/runtests.jl
index e9d37ea1b..15d10118c 100644
--- a/lib/OptimizationOptimJL/test/runtests.jl
+++ b/lib/OptimizationOptimJL/test/runtests.jl
@@ -1,6 +1,6 @@
 using OptimizationOptimJL,
       OptimizationOptimJL.Optim, Optimization, ForwardDiff, Zygote,
-      Random, ModelingToolkit
+      Random, ModelingToolkit, OptimizationBase.DifferentiationInterface
 using Test
 
 struct CallbackTester
@@ -42,7 +42,7 @@ end
             b = 0.5)); callback = CallbackTester(length(x0)))
     @test 10 * sol.objective < l1
 
-    f = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
+    f = OptimizationFunction(rosenbrock, SecondOrder(AutoForwardDiff(), AutoZygote()))
     Random.seed!(1234)
     prob = OptimizationProblem(f, x0, _p, lb = [-1.0, -1.0], ub = [0.8, 0.8])
 
diff --git a/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl b/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
index 8c3031bce..001a2dac6 100644
--- a/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
+++ b/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
@@ -8,7 +8,8 @@ SciMLBase.supports_opt_cache_interface(opt::AbstractRule) = true
 SciMLBase.requiresgradient(opt::AbstractRule) = true
 SciMLBase.allowsfg(opt::AbstractRule) = true
 
-function SciMLBase.__init(prob::SciMLBase.OptimizationProblem, opt::AbstractRule; save_best = true,
+function SciMLBase.__init(
+        prob::SciMLBase.OptimizationProblem, opt::AbstractRule; save_best = true,
         callback = (args...) -> (false), epochs = nothing,
         progress = false, kwargs...)
     return OptimizationCache(prob, opt; save_best, callback, progress, epochs,
diff --git a/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl b/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
index 13afb6557..6f9753189 100644
--- a/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
+++ b/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
@@ -19,7 +19,7 @@ SciMLBase.requiresgradient(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
 SciMLBase.requiresconsjac(opt::Union{LINCOA, COBYLA}) = true
 
 function Optimization.OptimizationCache(prob::SciMLBase.OptimizationProblem,
-        opt::PRIMASolvers, data;
+        opt::PRIMASolvers;
         callback = Optimization.DEFAULT_CALLBACK,
         maxiters::Union{Number, Nothing} = nothing,
         maxtime::Union{Number, Nothing} = nothing,
@@ -39,7 +39,7 @@ function Optimization.OptimizationCache(prob::SciMLBase.OptimizationProblem,
 
     return Optimization.OptimizationCache(f, reinit_cache, prob.lb, prob.ub, prob.lcons,
         prob.ucons, prob.sense,
-        opt, data, progress, callback, nothing,
+        opt, progress, callback, nothing,
         Optimization.OptimizationBase.AnalysisResults(nothing, nothing),
         merge((; maxiters, maxtime, abstol, reltol),
             NamedTuple(kwargs)))
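
Migration note (illustrative, not stated in the patch itself): with the `data` positional argument removed from the sub-library `__init`/`__solve` entry points, objectives are always evaluated as `f(θ, p)`, and OptimizationFlux is deleted outright. The sketch below reuses the Rosenbrock setup from the removed OptimizationFlux tests and assumes OptimizationOptimisers (whose `__init` gains an `epochs` keyword in this patch) as the replacement; treating `Adam` from Optimisers.jl as the drop-in for `Flux.Adam` is an assumption about the intended migration path, not something the diff specifies.

```julia
# Hypothetical post-patch usage, mirroring the deleted OptimizationFlux tests.
using Optimization, OptimizationOptimisers, ForwardDiff

rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
x0 = zeros(2)
p = [1.0, 100.0]

# AutoForwardDiff supplies the gradient that gradient-based rules require.
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, x0, p)

# Adam here is the Optimisers.jl rule reexported by OptimizationOptimisers;
# the objective is now always called as rosenbrock(θ, p), with no data batch
# threaded through the callback.
sol = solve(prob, OptimizationOptimisers.Adam(0.1), maxiters = 1000)
```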