
Commit

drop julia v1.9
CarloLucibello committed Oct 11, 2024
1 parent d7b21ba commit 2cac684
Showing 6 changed files with 8 additions and 16 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -17,7 +17,7 @@ jobs:
       fail-fast: false
       matrix:
         version:
-          - '1.9' # Replace this with the minimum Julia version that your package supports.
+          - '1.10' # Replace this with the minimum Julia version that your package supports.
           - '1'
         os: [ubuntu-latest]
         arch: [x64]
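Aside (not part of the commit): with the matrix above, CI now runs the suite on the declared minimum, Julia 1.10, and on '1', which setup-julia typically resolves to the latest stable 1.x release. A minimal Julia sketch of the invariant this gives a test job (illustrative only, not part of the workflow):

    # Assumed to run inside one of the CI jobs spawned by the matrix above.
    @assert VERSION >= v"1.10" "this package no longer supports Julia < 1.10"
    @info "Running test suite" VERSION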
2 changes: 1 addition & 1 deletion Project.toml
@@ -65,4 +65,4 @@ SpecialFunctions = "2.1.2"
 Statistics = "1"
 Zygote = "0.6.67"
 cuDNN = "1"
-julia = "1.9"
+julia = "1.10"
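Aside (not part of the commit): Pkg reads a bare compat entry such as julia = "1.10" with caret semantics, i.e. any release from 1.10.0 up to, but not including, 2.0.0 satisfies it. A small Julia sketch of that range (in_compat is an illustrative helper, not Pkg API):

    # Caret semantics of the compat entry julia = "1.10": [1.10.0, 2.0.0).
    in_compat(v::VersionNumber) = v"1.10" <= v < v"2"

    @assert in_compat(v"1.10.4")
    @assert in_compat(v"1.11.0")
    @assert !in_compat(v"1.9.4")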
2 changes: 1 addition & 1 deletion test/layers/normalisation.jl
@@ -446,7 +446,7 @@ end
     @test Zygote.hessian_reverse(summ1, [1.0,2.0,3.0]) == zeros(3, 3)

     m2 = Chain(BatchNorm(3), sum)
-    @test_broken Zygote.hessian_reverse(m2, Float32[1 2; 3 4; 5 6]) == zeros(Float32, 6, 6)
+    @test Zygote.hessian_reverse(m2, Float32[1 2; 3 4; 5 6]) == zeros(Float32, 6, 6) broken = VERSION >= v"1.11"
 end

 @testset "ForwardDiff" begin
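Aside (not part of the commit): the rewritten line swaps the unconditional @test_broken for the broken keyword of @test (available since Julia 1.7), so the hessian test is only expected to fail on Julia 1.11 and later. A self-contained sketch of how the two forms behave:

    using Test

    @testset "broken-test forms" begin
        # Always expected to fail; an unexpected pass is reported as an error.
        @test_broken 1 + 1 == 3

        # Expected to fail only while the condition holds; otherwise it is an
        # ordinary test. The condition is false here, so this simply passes.
        @test 1 + 1 == 2 broken = (VERSION >= v"99")
    end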
8 changes: 0 additions & 8 deletions test/runtests.jl
@@ -65,14 +65,6 @@ Random.seed!(0)
     @testset "functors" begin
         include("functors.jl")
     end
-
-    @static if VERSION == v"1.9"
-        using Documenter
-        @testset "Docs" begin
-            DocMeta.setdocmeta!(Flux, :DocTestSetup, :(using Flux); recursive=true)
-            doctest(Flux)
-        end
-    end
 else
     @info "Skipping CPU tests."
 end
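Aside (not part of the commit): the deleted block was the part of runtests.jl that ran the package's doctests, gated on exactly Julia 1.9. For reference, a hedged sketch of invoking the same doctests standalone, reusing the Documenter calls that were removed (assumes Documenter and Flux are available in the active environment):

    using Documenter, Flux

    # Make `using Flux` implicit in every doctest, then run them all.
    DocMeta.setdocmeta!(Flux, :DocTestSetup, :(using Flux); recursive=true)
    doctest(Flux)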
4 changes: 2 additions & 2 deletions test/train.jl
@@ -148,7 +148,7 @@ end
     model.bias .= 0
     pen2(x::AbstractArray) = sum(abs2, x)/2
     opt = Flux.setup(Adam(0.1), model)
-    @test_broken begin
+    @test begin
         trainfn!(model, data, opt) do m, x, y
             err = Flux.mse(m(x), y)
             l2 = sum(pen2, Flux.params(m))
@@ -157,7 +157,7 @@ end
         diff2 = model.weight .- init_weight
         @test diff1 ≈ diff2
         true
-    end
+    end broken = VERSION >= v"1.11"
 end

 # Take 3: using WeightDecay instead. Need the /2 above, to match exactly.
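Aside (not part of the commit): the hunks above exercise the explicit training path, Flux.setup plus a do-block loss. A minimal sketch of that pattern with a made-up model and batch (not taken from the test file; trainfn! there is presumably a stand-in for Flux.train!-style functions):

    using Flux

    model = Dense(2 => 1)
    data  = [(rand(Float32, 2, 8), rand(Float32, 1, 8))]   # one (x, y) batch
    opt   = Flux.setup(Adam(0.1), model)

    # Same shape as the trainfn!(model, data, opt) do m, x, y ... end call in
    # the diff: the closure gets the model and one batch, and returns the loss.
    Flux.train!(model, data, opt) do m, x, y
        Flux.mse(m(x), y)
    end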
6 changes: 3 additions & 3 deletions test/utils.jl
@@ -273,13 +273,13 @@ end
 @testset "params gradient" begin
     m = (x=[1,2.0], y=[3.0]);

-    @test_broken begin
-        # Explicit -- was broken by #2054 / then fixed / now broken again on julia v0.11
+    @test begin
+        # Explicit -- was broken by #2054 / then fixed / now broken again on julia v1.11
         gnew = gradient(m -> (sum(norm, Flux.params(m))), m)[1]
         @test gnew.x ≈ [0.4472135954999579, 0.8944271909999159]
         @test gnew.y ≈ [1.0]
         true
-    end
+    end broken = VERSION >= v"1.11"

     # Implicit
     gold = gradient(() -> (sum(norm, Flux.params(m))), Flux.params(m))
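Aside (not part of the commit): this testset contrasts an "explicit" gradient, taken with respect to the model container itself, with an "implicit" gradient over a Params collection; only the explicit block is now flagged broken on Julia 1.11. A condensed sketch of the two call shapes, mirroring the test (norm comes from LinearAlgebra there):

    using Flux, LinearAlgebra

    m = (x = [1, 2.0], y = [3.0])

    # Explicit: differentiate with respect to the container m.
    g_explicit = gradient(m -> sum(norm, Flux.params(m)), m)[1]

    # Implicit: differentiate with respect to the Params collection.
    g_implicit = gradient(() -> sum(norm, Flux.params(m)), Flux.params(m))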
