Add OptimizationFunction conversion and adtype arg and tests
Vaibhavdixit02 committed Nov 7, 2023
1 parent c838cf2 commit 4856a9d
Showing 3 changed files with 39 additions and 2 deletions.
1 change: 1 addition & 0 deletions Project.toml
@@ -78,6 +78,7 @@ Statistics = "1"
 SymbolicIndexingInterface = "0.2"
 Tables = "1"
 TruncatedStacktraces = "1"
+QuasiMonteCarlo = "0.3"
 Zygote = "0.6"
 julia = "1.9"

11 changes: 9 additions & 2 deletions src/problems/basic_problems.jl
@@ -688,11 +688,18 @@ function OptimizationProblem(f, args...; kwargs...)
     OptimizationProblem{true}(OptimizationFunction{true}(f), args...; kwargs...)
 end
 
-function OptimizationProblem(prob::NonlinearLeastSquaresProblem; kwargs...)
+function OptimizationFunction(f::NonlinearFunction, adtype::AbstractADType = NoAD(); kwargs...)
+    if isinplace(f)
+        throw(ArgumentError("Converting NonlinearFunction to OptimizationFunction is not supported with in-place functions yet."))
+    end
+    OptimizationFunction((u, p) -> sum(abs2, f(u, p)), adtype; kwargs...)
+end
+
+function OptimizationProblem(prob::NonlinearLeastSquaresProblem, adtype::AbstractADType = NoAD(); kwargs...)
     if isinplace(prob)
         throw(ArgumentError("Converting NonlinearLeastSquaresProblem to OptimizationProblem is not supported with in-place functions yet."))
     end
-    optf = OptimizationFunction(sum ∘ prob.f, grad = (Jv, u, p) -> prob.f.jvp(Jv, prob.f(u, p), u, p), kwargs...)
+    optf = OptimizationFunction(prob.f, adtype; kwargs...)
     return OptimizationProblem(optf, prob.u0, prob.p; prob.kwargs..., kwargs...)
 end

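For reference, the new OptimizationFunction(::NonlinearFunction, adtype) method makes conversions like the following possible. This is a minimal sketch; the residual function, initial guess, and BFGS solver are illustrative assumptions, not part of this commit:

using SciMLBase, ADTypes, ForwardDiff, Optimization, OptimizationOptimJL

# Illustrative out-of-place residual; minimizing sum(abs2, residual(u, p))
# drives u .^ 2 toward p.
residual(u, p) = u .^ 2 .- p
nlf = NonlinearFunction{false}(residual)

# The new method wraps the residual as (u, p) -> sum(abs2, residual(u, p)).
optf = OptimizationFunction(nlf, AutoForwardDiff())
optprob = OptimizationProblem(optf, [1.0, 1.0], [2.0, 2.0])
sol = solve(optprob, BFGS())  # expect sol.u ≈ sqrt.([2.0, 2.0])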
29 changes: 29 additions & 0 deletions test/downstream/nllsopt.jl
@@ -0,0 +1,29 @@
using NonlinearSolve, Optimization, OptimizationNLopt, ForwardDiff, Random
import FastLevenbergMarquardt, LeastSquaresOptim

true_function(x, θ) = @. θ[1] * exp(θ[2] * x) * cos(θ[3] * x + θ[4])

θ_true = [1.0, 0.1, 2.0, 0.5]

x = [-1.0, -0.5, 0.0, 0.5, 1.0]

y_target = true_function(x, θ_true)

function loss_function(θ, p)
    ŷ = true_function(p, θ)
    return ŷ .- y_target
end

# Initial guess: perturb the true parameters slightly
θ_init = θ_true .+ randn!(similar(θ_true)) * 0.1
prob_oop = NonlinearLeastSquaresProblem{false}(loss_function, θ_init, x)

solver = LevenbergMarquardt()

# Solve the nonlinear least-squares problem directly with NonlinearSolve
@time sol = solve(prob_oop, solver; maxiters = 10000, abstol = 1e-8)

# Convert just the NonlinearFunction and build the OptimizationProblem by hand
optf = OptimizationFunction(prob_oop.f, AutoForwardDiff())
optprob = OptimizationProblem(optf, prob_oop.u0, prob_oop.p)
@time sol = solve(optprob, NLopt.LD_LBFGS(); maxiters = 10000, abstol = 1e-8)

# Or convert the whole NonlinearLeastSquaresProblem in one call
optprob = OptimizationProblem(prob_oop, AutoForwardDiff())
@time sol = solve(optprob, NLopt.LD_LBFGS(); maxiters = 10000, abstol = 1e-8)
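A hypothetical sanity check that could be appended (not part of this commit): both solution paths minimize the same sum-of-squares objective, so the recovered parameters should land near θ_true.

using Test, SciMLBase
@test SciMLBase.successful_retcode(sol)
@test sol.u ≈ θ_true atol = 1e-2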
