diff --git a/Project.toml b/Project.toml
index 3af67f51c..3e6a8b47d 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "Lux"
 uuid = "b2108857-7c20-44ae-9111-449ecde12c47"
 authors = ["Avik Pal and contributors"]
-version = "1.4.0"
+version = "1.4.1-DEV"
 
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -79,7 +79,7 @@ DispatchDoctor = "0.4.12"
 Enzyme = "0.13.16"
 EnzymeCore = "0.8.6"
 FastClosures = "0.3.2"
-Flux = "0.14.25"
+Flux = "0.15"
 ForwardDiff = "0.10.36"
 FunctionWrappers = "1.1.3"
 Functors = "0.5"
@@ -95,7 +95,7 @@ MacroTools = "0.5.13"
 Markdown = "1.10"
 NCCL = "0.1.1"
 NNlib = "0.9.24"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Preferences = "1.4.3"
 Random = "1.10"
 Reactant = "0.2.6"
@@ -107,7 +107,7 @@ SimpleChains = "0.4.7"
 Static = "1.1.1"
 StaticArraysCore = "1.4.3"
 Statistics = "1.10"
-Tracker = "0.2.36"
+Tracker = "0.2.37"
 WeightInitializers = "1"
 Zygote = "0.6.70"
 julia = "1.10"
diff --git a/docs/Project.toml b/docs/Project.toml
index ff948b0b3..2294d0bfd 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -52,7 +52,7 @@ LuxLib = "1.3.4"
 LuxTestUtils = "1.5"
 MLDataDevices = "1.6"
 NNlib = "0.9.24"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Pkg = "1.10"
 Printf = "1.10"
 Random = "1.10"
diff --git a/examples/Basics/Project.toml b/examples/Basics/Project.toml
index 1e32e0ea0..c7ea884bf 100644
--- a/examples/Basics/Project.toml
+++ b/examples/Basics/Project.toml
@@ -13,5 +13,5 @@ ComponentArrays = "0.15.18"
 ForwardDiff = "0.10"
 Lux = "1"
 LuxCUDA = "0.3"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Zygote = "0.6"
diff --git a/examples/BayesianNN/Project.toml b/examples/BayesianNN/Project.toml
index f1a9d476f..7beb286cc 100644
--- a/examples/BayesianNN/Project.toml
+++ b/examples/BayesianNN/Project.toml
@@ -14,6 +14,6 @@ Functors = "0.4, 0.5"
 LinearAlgebra = "1"
 Lux = "1.2"
 Random = "1"
-Tracker = "0.2.36"
+Tracker = "0.2.37"
 Turing = "0.34, 0.35"
 Zygote = "0.6.69"
diff --git a/examples/ConvMixer/Project.toml b/examples/ConvMixer/Project.toml
index d1ffac2cd..8ae780657 100644
--- a/examples/ConvMixer/Project.toml
+++ b/examples/ConvMixer/Project.toml
@@ -31,7 +31,7 @@ LuxCUDA = "0.3.2"
 MLDatasets = "0.7.14"
 MLUtils = "0.4.4"
 OneHotArrays = "0.2.5"
-Optimisers = "0.4"
+Optimisers = "0.4.1"
 PreferenceTools = "0.1.2"
 Printf = "1.10"
 ProgressBars = "1.5.1"
diff --git a/examples/DDIM/Project.toml b/examples/DDIM/Project.toml
index 4608d02b2..743d7fc1d 100644
--- a/examples/DDIM/Project.toml
+++ b/examples/DDIM/Project.toml
@@ -36,7 +36,7 @@ JLD2 = "0.4.48, 0.5"
 Lux = "1"
 LuxCUDA = "0.3"
 MLUtils = "0.4"
-Optimisers = "0.3, 0.4"
+Optimisers = "0.4.1"
 ParameterSchedulers = "0.4.1"
 ProgressBars = "1"
 Random = "1.10"
diff --git a/examples/HyperNet/Project.toml b/examples/HyperNet/Project.toml
index 501036806..965427971 100644
--- a/examples/HyperNet/Project.toml
+++ b/examples/HyperNet/Project.toml
@@ -21,7 +21,7 @@ LuxCUDA = "0.3"
 MLDatasets = "0.7"
 MLUtils = "0.4"
 OneHotArrays = "0.2.5"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Setfield = "1"
 Statistics = "1"
 Zygote = "0.6"
diff --git a/examples/ImageNet/Project.toml b/examples/ImageNet/Project.toml
index 5a0c02255..7f91b02db 100644
--- a/examples/ImageNet/Project.toml
+++ b/examples/ImageNet/Project.toml
@@ -38,7 +38,7 @@ MLUtils = "0.4.4"
 MPI = "0.20.21"
 NCCL = "0.1.1"
 OneHotArrays = "0.2.5"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 ParameterSchedulers = "0.4.2"
 Random = "1.10"
 Setfield = "1.1.1"
diff --git a/examples/NeuralODE/Project.toml b/examples/NeuralODE/Project.toml
index 9bb2456cc..a292da633 100644
--- a/examples/NeuralODE/Project.toml
+++ b/examples/NeuralODE/Project.toml
@@ -21,7 +21,7 @@ LuxCUDA = "0.3"
 MLDatasets = "0.7"
 MLUtils = "0.4"
 OneHotArrays = "0.2.5"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 OrdinaryDiffEqTsit5 = "1"
 SciMLSensitivity = "7.63"
 Statistics = "1"
diff --git a/examples/PINN2DPDE/Project.toml b/examples/PINN2DPDE/Project.toml
index 57abc282e..4b2b24bf0 100644
--- a/examples/PINN2DPDE/Project.toml
+++ b/examples/PINN2DPDE/Project.toml
@@ -18,7 +18,7 @@ Lux = "1"
 LuxCUDA = "0.3.3"
 MLUtils = "0.4.4"
 OnlineStats = "1.7.1"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Printf = "1.10"
 Random = "1.10"
 Statistics = "1.10"
diff --git a/examples/PolynomialFitting/Project.toml b/examples/PolynomialFitting/Project.toml
index 36c4d4e59..168865fdd 100644
--- a/examples/PolynomialFitting/Project.toml
+++ b/examples/PolynomialFitting/Project.toml
@@ -14,6 +14,6 @@ ADTypes = "1.10"
 CairoMakie = "0.12"
 Lux = "1"
 LuxCUDA = "0.3"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Statistics = "1"
 Zygote = "0.6"
diff --git a/examples/SimpleChains/Project.toml b/examples/SimpleChains/Project.toml
index c8ec5a59b..3f1b9b2a4 100644
--- a/examples/SimpleChains/Project.toml
+++ b/examples/SimpleChains/Project.toml
@@ -16,7 +16,7 @@ Lux = "1"
 MLDatasets = "0.7.14"
 MLUtils = "0.4"
 OneHotArrays = "0.2.5"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Random = "1"
 SimpleChains = "0.4.6"
 Zygote = "0.6.69"
diff --git a/examples/SimpleRNN/Project.toml b/examples/SimpleRNN/Project.toml
index 78cbad4b6..81e54f61e 100644
--- a/examples/SimpleRNN/Project.toml
+++ b/examples/SimpleRNN/Project.toml
@@ -16,6 +16,6 @@ JLD2 = "0.5"
 Lux = "1"
 LuxCUDA = "0.3"
 MLUtils = "0.4"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Statistics = "1"
 Zygote = "0.6"
diff --git a/ext/LuxFluxExt.jl b/ext/LuxFluxExt.jl
index d0f89b2b0..f901823ed 100644
--- a/ext/LuxFluxExt.jl
+++ b/ext/LuxFluxExt.jl
@@ -211,12 +211,6 @@ function Lux.convert_flux_model(
     return Lux.GroupNorm(l.chs, l.G, l.λ; l.affine, epsilon=l.ϵ)
 end
 
-const _INVALID_TRANSFORMATION_TYPES = Union{<:Flux.Recur}
-
-function Lux.convert_flux_model(l::T; kwargs...) where {T <: _INVALID_TRANSFORMATION_TYPES}
-    throw(FluxModelConversionException("Transformation of type $(T) is not supported."))
-end
-
 for cell in (:RNNCell, :LSTMCell, :GRUCell)
     msg = "Recurrent Cell: $(cell) for Flux has semantical difference with Lux, \
            mostly in-terms of how the bias term is dealt with. Lux aligns with the Pytorch \
diff --git a/test/Project.toml b/test/Project.toml
index 429ba85c5..58dd94c2e 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -65,7 +65,7 @@ MLUtils = "0.4.3"
 NNlib = "0.9.24"
 Octavian = "0.3.28"
 OneHotArrays = "0.2.5"
-Optimisers = "0.3.4, 0.4"
+Optimisers = "0.4.1"
 Pkg = "1.10"
 Preferences = "1.4.3"
 Random = "1.10"
@@ -79,5 +79,5 @@ Static = "1"
 StaticArrays = "1.9"
 Statistics = "1.10"
 Test = "1.10"
-Tracker = "0.2.36"
+Tracker = "0.2.37"
 Zygote = "0.6.70"
diff --git a/test/runtests.jl b/test/runtests.jl
index 130ea0275..6837b9ae0 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -26,8 +26,7 @@ if ("all" in LUX_TEST_GROUP || "misc" in LUX_TEST_GROUP)
     push!(EXTRA_PKGS, Pkg.PackageSpec("MPI"))
     (BACKEND_GROUP == "all" || BACKEND_GROUP == "cuda") &&
         push!(EXTRA_PKGS, Pkg.PackageSpec("NCCL"))
-    # XXX: Reactivate once Flux is compatible with Functors 0.5
-    # push!(EXTRA_PKGS, Pkg.PackageSpec("Flux"))
+    push!(EXTRA_PKGS, Pkg.PackageSpec("Flux"))
 end
 
 if !Sys.iswindows()
diff --git a/test/transform/flux_tests.jl b/test/transform/flux_tests.jl
index 38bc1c176..6b9ab9d60 100644
--- a/test/transform/flux_tests.jl
+++ b/test/transform/flux_tests.jl
@@ -1,4 +1,4 @@
-@testitem "FromFluxAdaptor" setup=[SharedTestSetup] tags=[:misc] skip=:(true) begin
+@testitem "FromFluxAdaptor" setup=[SharedTestSetup] tags=[:misc] begin
     import Flux
 
     toluxpsst = FromFluxAdaptor(; preserve_ps_st=true)
@@ -8,10 +8,10 @@
     @testset "$mode" for (mode, aType, dev, ongpu) in MODES
         @testset "Containers" begin
             @testset "Chain" begin
-                models = [Flux.Chain(Flux.Dense(2 => 5), Flux.Dense(5 => 1)),
-                    Flux.Chain(; l1=Flux.Dense(2 => 5), l2=Flux.Dense(5 => 1))] |> dev
-
-                for model in models
+                for model in [
+                    Flux.Chain(Flux.Dense(2 => 5), Flux.Dense(5 => 1)) |> dev,
+                    Flux.Chain(; l1=Flux.Dense(2 => 5), l2=Flux.Dense(5 => 1)) |> dev
+                ]
                     x = rand(Float32, 2, 1) |> aType
 
                     model_lux = toluxpsst(model)
@@ -57,10 +57,10 @@
             end
 
             @testset "Parallel" begin
-                models = [Flux.Parallel(+, Flux.Dense(2 => 2), Flux.Dense(2 => 2)),
-                    Flux.Parallel(+; l1=Flux.Dense(2 => 2), l2=Flux.Dense(2 => 2))] |> dev
-
-                for model in models
+                for model in [
+                    Flux.Parallel(+, Flux.Dense(2 => 2), Flux.Dense(2 => 2)) |> dev,
+                    Flux.Parallel(+; l1=Flux.Dense(2 => 2), l2=Flux.Dense(2 => 2)) |> dev
+                ]
                     x = rand(Float32, 2, 1) |> aType
 
                     model_lux = toluxpsst(model)
@@ -94,8 +94,10 @@
 
         @testset "Linear" begin
             @testset "Dense" begin
-                for model in [Flux.Dense(2 => 4) |> dev,
-                    Flux.Dense(2 => 4; bias=false) |> dev]
+                for model in [
+                    Flux.Dense(2 => 4) |> dev,
+                    Flux.Dense(2 => 4; bias=false) |> dev
+                ]
                     x = randn(Float32, 2, 4) |> aType
 
                     model_lux = toluxpsst(model)
@@ -112,7 +114,9 @@
 
             @testset "Scale" begin
                 for model in [
-                    Flux.Scale(2) |> dev, Flux.Scale(2; bias=false) |> dev]
+                    Flux.Scale(2) |> dev,
+                    Flux.Scale(2; bias=false) |> dev
+                ]
                     x = randn(Float32, 2, 4) |> aType
 
                     model_lux = toluxpsst(model)
@@ -128,8 +132,10 @@
             end
 
             @testset "Bilinear" begin
-                for model in [Flux.Bilinear((2, 3) => 5) |> dev,
-                    Flux.Bilinear((2, 3) => 5; bias=false) |> dev]
+                for model in [
+                    Flux.Bilinear((2, 3) => 5) |> dev,
+                    Flux.Bilinear((2, 3) => 5; bias=false) |> dev
+                ]
                     x = randn(Float32, 2, 4) |> aType
                     y = randn(Float32, 3, 4) |> aType
 
@@ -447,7 +453,7 @@
             bias
         end
 
-        Flux.@functor CustomFluxLayer
+        Flux.@layer CustomFluxLayer
 
         (c::CustomFluxLayer)(x) = c.weight .* x .+ c.bias
 
@@ -466,12 +472,5 @@
         @test tolux(identity) isa Lux.NoOpLayer
         @test tolux(+) isa Lux.WrappedFunction
     end
-
-    @testset "Unsupported Layers" begin
-        accum(h, x) = (h + x, x)
-        rnn = Flux.Recur(accum, 0)
-
-        @test_throws Lux.FluxModelConversionException tolux(rnn)
-    end
 end
 end
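For reference, a minimal sketch (not part of the patch) of the conversion path the re-enabled tests exercise, assuming Flux >= 0.15 and Lux 1.x; the model and variable names below are illustrative only:

using Flux, Lux, Random

# A plain Flux model; under Flux 0.15 custom layers are declared with `Flux.@layer`,
# which is why the test above swaps out `Flux.@functor`.
fmodel = Flux.Chain(Flux.Dense(2 => 5), Flux.Dense(5 => 1))

# Convert to a Lux layer, carrying over Flux's parameters and states.
lmodel = FromFluxAdaptor(; preserve_ps_st=true)(fmodel)

# Lux layers are stateless, so parameters/states are set up explicitly and passed on each call.
ps, st = Lux.setup(Random.default_rng(), lmodel)
y, _ = lmodel(rand(Float32, 2, 1), ps, st)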