From ab89ed4e12dbbfef62cbde78e4677a6202ba4be3 Mon Sep 17 00:00:00 2001
From: Michael Abbott <32575566+mcabbott@users.noreply.github.com>
Date: Fri, 8 Nov 2024 14:58:26 -0500
Subject: [PATCH] indent by two spaces

---
 ext/OptimisersEnzymeCoreExt.jl |  4 ++--
 test/runtests.jl               | 38 +++++++++++++++++-----------------
 2 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/ext/OptimisersEnzymeCoreExt.jl b/ext/OptimisersEnzymeCoreExt.jl
index d54d883..1fb3ea6 100644
--- a/ext/OptimisersEnzymeCoreExt.jl
+++ b/ext/OptimisersEnzymeCoreExt.jl
@@ -16,8 +16,8 @@ For use with Enzyme's Duplicated, this just calls `setup(rule, model_grad.val)`.
 setup(rule::AbstractRule, model_grad::Duplicated) = setup(rule, model_grad.val)
 
 _setup(rule, x::Duplicated; cache) = throw(ArgumentError(
-  """Objects of type `Duplicated` are only supported by Optimisers.jl at top level,
-  they may not appear deep inside other objects."""
+    """Objects of type `Duplicated` are only supported by Optimisers.jl at top level,
+    they may not appear deep inside other objects."""
 ))
 
 """
diff --git a/test/runtests.jl b/test/runtests.jl
index e17553c..956aa04 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -536,25 +536,25 @@ end
     end
 
     @testset "Enzyme Duplicated" begin
-    x_dx = Duplicated(Float16[1,2,3], Float16[1,0,-4])
-    st = Optimisers.setup(Momentum(1/9), x_dx) # acts only on x not on dx
-    @test st isa Optimisers.Leaf
-    @test nothing === Optimisers.update!(st, x_dx) # mutates both arguments
-    @test x_dx.val ≈ Float16[0.8887, 2.0, 3.445]
-
-    shared = [1.0]
-    model = (x=shared, y=shared)
-    grad = deepcopy(model) # Enzyme produces something like this, grad.x === grad.y, already accumulated.
-    dup = Duplicated(model, model)
-    st2 = Optimisers.setup(Descent(0.1), model)
-    Optimisers.update!(st2, dup)
-    @test model.x ≈ [0.9]
-    shared .= 1
-    Optimisers.update!(st2, model, grad)
-    model.x ≈ [0.8] # This is wrong, but don't make it a test.
-    # Ideally, perhaps the 3-arg update! could notice that grad.x===grad.y, and not accumulate the gradient in this case?
-
-    @test_throws ArgumentError Optimisers.setup(Adam(), (; a=[1,2,3.], b=x_dx)) # Duplicated deep inside is not allowed
+      x_dx = Duplicated(Float16[1,2,3], Float16[1,0,-4])
+      st = Optimisers.setup(Momentum(1/9), x_dx) # acts only on x not on dx
+      @test st isa Optimisers.Leaf
+      @test nothing === Optimisers.update!(st, x_dx) # mutates both arguments
+      @test x_dx.val ≈ Float16[0.8887, 2.0, 3.445]
+
+      shared = [1.0]
+      model = (x=shared, y=shared)
+      grad = deepcopy(model) # Enzyme produces something like this, grad.x === grad.y, already accumulated.
+      dup = Duplicated(model, model)
+      st2 = Optimisers.setup(Descent(0.1), model)
+      Optimisers.update!(st2, dup)
+      @test model.x ≈ [0.9]
+      shared .= 1
+      Optimisers.update!(st2, model, grad)
+      model.x ≈ [0.8] # This is wrong, but don't make it a test.
+      # Ideally, perhaps the 3-arg update! could notice that grad.x===grad.y, and not accumulate the gradient in this case?
+
+      @test_throws ArgumentError Optimisers.setup(Adam(), (; a=[1,2,3.], b=x_dx)) # Duplicated deep inside is not allowed
     end
   end
   @testset verbose=true "Destructure" begin