test: skip some Enzyme tests
avik-pal committed Nov 5, 2024
1 parent 58369a5 commit 318055f
Showing 4 changed files with 7 additions and 5 deletions.
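Every hunk below applies the same change: the Enzyme AD backend is excluded from gradient correctness checks, either by putting `AutoEnzyme()` into the `skip_backends` vector that is passed along, or by passing `skip_backends=[AutoEnzyme()]` directly to LuxTestUtils' `@test_gradients`. A minimal sketch of that call pattern, assuming LuxTestUtils and ADTypes are available; the function `f`, the inputs, and the tolerances are illustrative placeholders, not code from this repository:

```julia
using LuxTestUtils            # provides @test_gradients
using ADTypes: AutoEnzyme     # backend selector types (AutoEnzyme, AutoReverseDiff, ...)

# Illustrative differentiable function and inputs (placeholders, not from the test suite).
f = (x, scale, bias) -> sum(abs2, x .* scale .+ bias)
x = randn(Float32, 4, 3)
scale = randn(Float32, 4)
bias = randn(Float32, 4)

# Gradients of `f` w.r.t. all arguments are cross-checked across the supported
# AD backends, except Enzyme, which is skipped via `skip_backends`.
@test_gradients(f, x, scale, bias; atol=1.0f-3, rtol=1.0f-3,
    skip_backends=[AutoEnzyme()])
```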
2 changes: 1 addition & 1 deletion lib/LuxLib/test/common_ops/conv_tests.jl
@@ -63,7 +63,7 @@ function run_conv_testing(gen_f::Function, activation, kernel, stride, padding,
         (w, x, b) -> __f(activation, w, x, b, cdims)
     end
 
-    skip_backends = []
+    skip_backends = [AutoEnzyme()]
     mp = Tx != Tw
     mp && push!(skip_backends, AutoReverseDiff())
     ((mp && ongpu) || (mode == "amdgpu" && (Tx == Float64 || Tw == Float64))) &&
3 changes: 2 additions & 1 deletion lib/LuxLib/test/normalization/groupnorm_tests.jl
@@ -74,7 +74,8 @@ function run_groupnorm_testing(T, sz, groups, affine, act, aType, mode, ongpu)
 
     if affine
         __f = (args...) -> sum(groupnorm(args..., groups, act, epsilon))
-        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail)
+        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail,
+            skip_backends=[AutoEnzyme()])
     end
 end
 
2 changes: 1 addition & 1 deletion lib/LuxLib/test/normalization/instancenorm_tests.jl
@@ -66,7 +66,7 @@ function run_instancenorm_testing(gen_f, T, sz, training, act, aType, mode, ongp
         __f = (args...) -> sum(first(instancenorm(
             args..., rm, rv, training, act, T(0.1), epsilon)))
         soft_fail = fp16 ? fp16 : [AutoFiniteDiff()]
-        skip_backends = (Sys.iswindows() && fp16) ? [AutoEnzyme()] : []
+        skip_backends = [AutoEnzyme()]
         @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail, skip_backends)
     end
 end
5 changes: 3 additions & 2 deletions lib/LuxLib/test/normalization/layernorm_tests.jl
@@ -58,10 +58,11 @@ function run_layernorm_testing_core(
     soft_fail = fp16 ? fp16 : [AutoFiniteDiff()]
     if affine_shape !== nothing
         __f = (args...) -> sum(_f(args...))
-        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail)
+        @test_gradients(__f, x, scale, bias; atol, rtol, soft_fail,
+            skip_backends=[AutoEnzyme()])
     else
         __f = x -> sum(_f(x, scale, bias))
-        @test_gradients(__f, x; atol, rtol, soft_fail)
+        @test_gradients(__f, x; atol, rtol, soft_fail, skip_backends=[AutoEnzyme()])
     end
 
     if anonact !== act
