unbreak
CarloLucibello committed Oct 22, 2024
1 parent c9bab66 commit 1fb6f9d
Showing 2 changed files with 5 additions and 5 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -48,7 +48,7 @@ ChainRulesCore = "1.12"
 Compat = "4.10.0"
 Enzyme = "0.12, 0.13"
 Functors = "0.4"
-MLDataDevices = "1.4.0"
+MLDataDevices = "1.4.1"
 MLUtils = "0.4"
 MPI = "0.20.19"
 MacroTools = "0.5"
8 changes: 4 additions & 4 deletions test/ext_cuda/cuda.jl
@@ -113,10 +113,10 @@ end
 
 # Even more trivial: no movement
 @test gradient(x -> sum(abs, cpu(x)), a)[1] isa Matrix
-@test_broken gradient(x -> sum(abs, cpu(x)), a')[1] isa Matrix
+@test gradient(x -> sum(abs, cpu(x)), a')[1] isa Matrix
 @test gradient(x -> sum(cpu(x)), a)[1] isa typeof(gradient(sum, a)[1]) # FillArray
 @test gradient(x -> sum(abs, gpu(x)), ca)[1] isa CuArray
-@test_broken gradient(x -> sum(abs, gpu(x)), ca')[1] isa CuArray
+@test gradient(x -> sum(abs, gpu(x)), ca')[1] isa CuArray
 
 # More complicated, Array * CuArray is an error
 g0 = gradient(x -> sum(abs, (a * (a * x))), a)[1]
@@ -131,8 +131,8 @@
 
 # Scalar indexing of an array, needs OneElement to transfer to GPU
 # https://github.com/FluxML/Zygote.jl/issues/1005
-@test_broken gradient(x -> cpu(2 .* gpu(x))[1], Float32[1,2,3]) == ([2,0,0],)
-@test_broken gradient(x -> cpu(gpu(x) * gpu(x))[1,2], Float32[1 2 3; 4 5 6; 7 8 9]) == ([2 6 8; 0 2 0; 0 3 0],)
+@test gradient(x -> cpu(2 .* gpu(x))[1], Float32[1,2,3]) == ([2,0,0],)
+@test gradient(x -> cpu(gpu(x) * gpu(x))[1,2], Float32[1 2 3; 4 5 6; 7 8 9]) == ([2 6 8; 0 2 0; 0 3 0],)
 end
 
 @testset "gpu(x) and cpu(x) on structured arrays" begin
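For reference, the first re-enabled scalar-indexing test above corresponds to the following standalone sketch (a minimal example, assuming Flux with a functional CUDA device; `gradient`, `cpu`, and `gpu` are the Flux/Zygote helpers the test exercises):

using Flux, CUDA   # assumes a CUDA-capable GPU is available

x = Float32[1, 2, 3]

# Differentiate through gpu(x), broadcast on the GPU, move back with cpu,
# then scalar-index the result. The pullback of the indexing step produces
# a OneElement that must transfer back to the GPU (Zygote.jl issue 1005);
# the final gradient lands on the CPU alongside the input.
g = gradient(x -> cpu(2 .* gpu(x))[1], x)[1]
@assert g == Float32[2, 0, 0]   # only the indexed entry carries gradient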
