From 0e61db3d75e8b0d8e339bcd6249fad074d6b5cfa Mon Sep 17 00:00:00 2001
From: Sathvik Bhagavan
Date: Fri, 1 Mar 2024 03:28:18 +0000
Subject: [PATCH] docs: use `Adam` instead of `ADAM` in missing_physics
 tutorial

---
 docs/src/showcase/missing_physics.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/src/showcase/missing_physics.md b/docs/src/showcase/missing_physics.md
index a00f32ff085..52a153ed32a 100644
--- a/docs/src/showcase/missing_physics.md
+++ b/docs/src/showcase/missing_physics.md
@@ -258,7 +258,7 @@ Thus we first solve the optimization problem with ADAM. Choosing a learning rate
 (tuned to be as high as possible that doesn't tend to make the loss shoot up), we see:
 
 ```@example ude
-res1 = Optimization.solve(optprob, ADAM(), callback = callback, maxiters = 5000)
+res1 = Optimization.solve(optprob, OptimizationOptimisers.Adam(), callback = callback, maxiters = 5000)
 println("Training loss after $(length(losses)) iterations: $(losses[end])")
 ```
 
@@ -267,7 +267,7 @@ second optimization, and run it with BFGS. This looks like:
 
 ```@example ude
 optprob2 = Optimization.OptimizationProblem(optf, res1.u)
-res2 = Optimization.solve(optprob2, Optim.LBFGS(linesearch = BackTracking()), callback = callback, maxiters = 1000)
+res2 = Optimization.solve(optprob2, LBFGS(linesearch = BackTracking()), callback = callback, maxiters = 1000)
 println("Final training loss after $(length(losses)) iterations: $(losses[end])")
 
 # Rename the best candidate
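For anyone wanting to try the patched calls outside the tutorial, below is a minimal sketch of the two-stage Adam-then-LBFGS pattern these hunks touch. The quadratic `loss`, the `zeros(3)` initial guess, and the `AutoForwardDiff` backend are placeholder assumptions, not the tutorial's code (which minimizes a UDE loss over neural-network parameters), and it assumes a recent Optimization.jl in which the callback receives the optimizer state and the current loss.

```julia
# Minimal sketch (not the tutorial code): a placeholder loss and initial
# guess stand in for the tutorial's UDE loss and network parameters.
using Optimization, OptimizationOptimisers, OptimizationOptimJL
using LineSearches: BackTracking
using ForwardDiff

losses = Float64[]
callback = function (state, l)
    push!(losses, l)  # record the loss trajectory, as the tutorial does
    return false      # returning true would halt the optimizer early
end

# Placeholder objective; the tutorial minimizes the UDE prediction error.
loss(u, p) = sum(abs2, u .- [1.0, 2.0, 3.0])

optf = Optimization.OptimizationFunction(loss, Optimization.AutoForwardDiff())
optprob = Optimization.OptimizationProblem(optf, zeros(3))

# Stage 1: Adam is robust far from the optimum, so it goes first.
res1 = Optimization.solve(optprob, OptimizationOptimisers.Adam(),
    callback = callback, maxiters = 5000)

# Stage 2: restart from Adam's result; LBFGS converges fast near the optimum.
optprob2 = Optimization.OptimizationProblem(optf, res1.u)
res2 = Optimization.solve(optprob2, LBFGS(linesearch = BackTracking()),
    callback = callback, maxiters = 1000)
println("Final loss after $(length(losses)) iterations: $(losses[end])")
```

As context for the rename itself: the `ADAM` spelling comes from the older Flux-style optimizers, while Optimisers.jl, which OptimizationOptimisers wraps, names it `Adam`, so the patched tutorial now matches the exported name.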