Commit 6c14bbe: Update OptimizationPolyalgorithms.jl

Vaibhavdixit02 authored Nov 17, 2023
1 parent a7ab9f1 commit 6c14bbe
Showing 1 changed file with 7 additions and 4 deletions.
11 changes: 7 additions & 4 deletions lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl
```diff
@@ -21,21 +21,24 @@ function SciMLBase.__solve(prob::OptimizationProblem,
     if isempty(args) && deterministic && prob.lb === nothing && prob.ub === nothing
         # If deterministic then ADAM -> finish with BFGS
         if maxiters === nothing
-            res1 = Optimization.solve(prob, Optimisers.ADAM(0.01), args...; maxiters = 300,
+            res1 = Optimization.solve(prob, Optimisers.ADAM(0.1), args...; maxiters = 300,
                 kwargs...)
         else
-            res1 = Optimization.solve(prob, Optimisers.ADAM(0.01), args...; maxiters,
+            res1 = Optimization.solve(prob, Optimisers.ADAM(0.1), args...; maxiters,
                 kwargs...)
         end

         optprob2 = remake(prob, u0 = res1.u)
-        res1 = Optimization.solve(optprob2, BFGS(initial_stepnorm = 0.01), args...;
+        res1 = Optimization.solve(optprob2, LBFGS(initial_stepnorm = 0.01), args...;
             maxiters, kwargs...)
     elseif isempty(args) && deterministic
-        res1 = Optimization.solve(prob, BFGS(initial_stepnorm = 0.01), args...; maxiters,
+        res1 = Optimization.solve(prob, LBFGS(), args...; maxiters,
             kwargs...)
     else
         res1 = Optimization.solve(prob, Optimisers.ADAM(0.1), args...; maxiters, kwargs...)
+        optprob2 = remake(prob, u0 = res1.u)
+        res1 = Optimization.solve(optprob2, Optimisers.ADAM(0.1), args...;
+            maxiters÷10, kwargs...)
     end
 end
```

Codecov flagged the added lines (new file lines 24, 27, 32, 35, and 39-40) as not covered by tests.
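For context, the polyalgorithm above is the `PolyOpt()` solver. Below is a minimal usage sketch; it is not part of this commit, and the Rosenbrock objective, the `AutoForwardDiff()` backend, and the `maxiters` value are illustrative assumptions:

```julia
# Hedged usage sketch (assumed objective, AD backend, and maxiters).
# With no bounds and an empty `args`, this should hit the first branch above:
# an Optimisers.ADAM(0.1) warm-up, then an LBFGS refinement started from the ADAM result.
using Optimization, OptimizationPolyalgorithms, ForwardDiff

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
u0 = zeros(2)
p = [1.0, 100.0]

optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, u0, p)   # no lb/ub, so the ADAM -> LBFGS path applies
sol = solve(prob, PolyOpt(); maxiters = 1000)
```

The two-stage pattern matches the comment in the diff: a cheap first-order warm-up followed by a quasi-Newton finisher.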
