From 126e7bd78c3d1874b1e0d7c4bbec9bd37281a795 Mon Sep 17 00:00:00 2001
From: Michael Abbott <32575566+mcabbott@users.noreply.github.com>
Date: Sun, 24 Nov 2024 09:55:13 -0500
Subject: [PATCH] more rebase problems

Co-authored-by: Carlo Lucibello
---
 src/optimise/train.jl | 21 ---------------------
 src/train.jl          |  2 +-
 2 files changed, 1 insertion(+), 22 deletions(-)

diff --git a/src/optimise/train.jl b/src/optimise/train.jl
index af1ac86032..52488107ef 100644
--- a/src/optimise/train.jl
+++ b/src/optimise/train.jl
@@ -1,24 +1,3 @@
-using ProgressLogging: @progress, @withprogress, @logprogress
-import Zygote: Params, gradient, withgradient
-
-# Add methods to Optimisers.jl's function, so that there is just one Flux.update!
-# for both explicit and implicit parameters.
-import Optimisers.update!
-
-"""
-    update!(opt, p, g)
-    update!(opt, ps::Params, gs)
-
-Perform an update step of the parameters `ps` (or the single parameter `p`)
-according to optimiser `opt::AbstractOptimiser` and the gradients `gs` (the gradient `g`).
-
-As a result, the parameters are mutated and the optimiser's internal state may change.
-The gradient could be mutated as well.
-
-!!! compat "Deprecated"
-    This method for implicit `Params` (and `AbstractOptimiser`) will be removed from Flux 0.15.
-    The explicit method `update!(opt, model, grad)` from Optimisers.jl will remain.
-"""
 function update!(opt::AbstractOptimiser, x::AbstractArray, x̄)
   x̄r = copyto!(similar(x̄), x̄)  # Flux.Optimise assumes it can mutate the gradient. This is not
                                # safe due to aliasing, nor guaranteed to be possible, e.g. Fill.
diff --git a/src/train.jl b/src/train.jl
index cce7f3492c..e820f450d2 100644
--- a/src/train.jl
+++ b/src/train.jl
@@ -11,7 +11,7 @@ using Zygote: Zygote
 export setup, train!
 
 using ProgressLogging: @progress, @withprogress, @logprogress
-using Zygote: Zygote, Params
+using Zygote: Zygote
 using EnzymeCore: Duplicated
 
 """
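
For reference, the docstring removed above directs users to the explicit
`update!(opt, model, grad)` method from Optimisers.jl, which this patch keeps.
A minimal sketch of a training step with that explicit API, assuming
Flux >= 0.15; the `Dense` model, data, and learning rate below are
illustrative and not part of this patch:

    using Flux  # Flux.update! dispatches to Optimisers.jl's explicit method

    model = Dense(2 => 1)                       # toy model
    opt_state = Flux.setup(Adam(0.01), model)   # optimiser state tree, replaces implicit Params

    x, y = rand(Float32, 2, 8), rand(Float32, 1, 8)

    # Explicit gradient with respect to the model itself, no Params collection:
    grads = Flux.gradient(m -> Flux.mse(m(x), y), model)

    # Updates parameters and optimiser state in place where possible, returning both:
    opt_state, model = Flux.update!(opt_state, model, grads[1])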