From 318055f667d95be7c4f46b28e88d766fad3dd92c Mon Sep 17 00:00:00 2001
From: Avik Pal
Date: Mon, 4 Nov 2024 19:29:49 -0500
Subject: [PATCH] test: skip some Enzyme tests

---
 lib/LuxLib/test/common_ops/conv_tests.jl            | 2 +-
 lib/LuxLib/test/normalization/groupnorm_tests.jl    | 3 ++-
 lib/LuxLib/test/normalization/instancenorm_tests.jl | 2 +-
 lib/LuxLib/test/normalization/layernorm_tests.jl    | 5 +++--
 4 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/lib/LuxLib/test/common_ops/conv_tests.jl b/lib/LuxLib/test/common_ops/conv_tests.jl
index c7426b205e..13ec44d5a1 100644
--- a/lib/LuxLib/test/common_ops/conv_tests.jl
+++ b/lib/LuxLib/test/common_ops/conv_tests.jl
@@ -63,7 +63,7 @@ function run_conv_testing(gen_f::Function, activation, kernel, stride, padding,
         (w, x, b) -> __f(activation, w, x, b, cdims)
     end
 
-    skip_backends = []
+    skip_backends = [AutoEnzyme()]
     mp = Tx != Tw
     mp && push!(skip_backends, AutoReverseDiff())
     ((mp && ongpu) || (mode == "amdgpu" && (Tx == Float64 || Tw == Float64))) &&
diff --git a/lib/LuxLib/test/normalization/groupnorm_tests.jl b/lib/LuxLib/test/normalization/groupnorm_tests.jl
index 627f5959aa..ada68c9f86 100644
--- a/lib/LuxLib/test/normalization/groupnorm_tests.jl
+++ b/lib/LuxLib/test/normalization/groupnorm_tests.jl
@@ -74,7 +74,8 @@ function run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
 
     if affine
         __f = (args...) -> sum(groupnorm(args..., groups, act, epsilon))
-        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail)
+        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail,
+            skip_backends=[AutoEnzyme()])
     end
 end
 
diff --git a/lib/LuxLib/test/normalization/instancenorm_tests.jl b/lib/LuxLib/test/normalization/instancenorm_tests.jl
index 0bc3155caf..aeb1d66cc7 100644
--- a/lib/LuxLib/test/normalization/instancenorm_tests.jl
+++ b/lib/LuxLib/test/normalization/instancenorm_tests.jl
@@ -66,7 +66,7 @@ function run_instancenorm_testing(gen_f, T, sz, training, act, aType, mode, ongp
         __f = (args...) -> sum(first(instancenorm(
             args..., rm, rv, training, act, T(0.1), epsilon)))
         soft_fail = fp16 ? fp16 : [AutoFiniteDiff()]
-        skip_backends = (Sys.iswindows() && fp16) ? [AutoEnzyme()] : []
+        skip_backends = [AutoEnzyme()]
         @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail, skip_backends)
     end
 end
diff --git a/lib/LuxLib/test/normalization/layernorm_tests.jl b/lib/LuxLib/test/normalization/layernorm_tests.jl
index 09b671b111..316606ed6c 100644
--- a/lib/LuxLib/test/normalization/layernorm_tests.jl
+++ b/lib/LuxLib/test/normalization/layernorm_tests.jl
@@ -58,10 +58,11 @@ function run_layernorm_testing_core(
     soft_fail = fp16 ? fp16 : [AutoFiniteDiff()]
     if affine_shape !== nothing
         __f = (args...) -> sum(_f(args...))
-        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail)
+        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail,
+            skip_backends=[AutoEnzyme()])
     else
         __f = x -> sum(_f(x, scale, bias))
-        @test_gradients(__f, x; atol, rtol, soft_fail)
+        @test_gradients(__f, x; atol, rtol, soft_fail, skip_backends=[AutoEnzyme()])
     end
 
     if anonact !== act