diff --git a/docs/src/guide/saving.md b/docs/src/guide/saving.md
index fb00454eec..57f2b9bdb9 100644
--- a/docs/src/guide/saving.md
+++ b/docs/src/guide/saving.md
@@ -21,7 +21,12 @@ julia> Flux.@layer MyModel
 julia> MyModel() = MyModel(Chain(Dense(10 => 5, relu), Dense(5 => 2)));
 
 julia> model = MyModel()
-MyModel(Chain(Dense(10 => 5, relu), Dense(5 => 2)))  # 67 parameters
+MyModel(
+  Chain(
+    Dense(10 => 5, relu),  # 55 parameters
+    Dense(5 => 2),  # 12 parameters
+  ),
+)  # Total: 4 arrays, 67 parameters, 484 bytes.
 
 julia> model_state = Flux.state(model);
 
diff --git a/src/layers/macro.jl b/src/layers/macro.jl
index 8ab26102f2..cd711f3b9c 100644
--- a/src/layers/macro.jl
+++ b/src/layers/macro.jl
@@ -38,13 +38,13 @@ Trio(
   Dropout(0.4),
 ) # Total: 3 arrays, 4 parameters, 240 bytes.
 
-# Freeze `c`, equivalent to `Optimisers.trainable(tri::Trio) = (; tri.a, tri.b)`
-julia> Flux.@layer Trio trainable=(a,b)
+julia> Flux.@layer :noexpand Trio trainable=(a,b)
 
-# Now the optimizer's state won't contain `c`
-julia> opt_state = Flux.setup(Adam(), tri);
-```
+julia> tri  # now the layer is printed compactly
+Trio(Dense(2 => 1, tanh), Dense(1 => 1; bias=false), Dropout(0.4))  # 4 parameters
+
+julia> opt_state = Flux.setup(Adam(), tri);  # `c` is not in the optimizer state
+```
 """
 macro layer(exs...)
     _layer_macro(exs...)
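
As context for the `saving.md` hunk above, here is a minimal sketch of the save/restore round trip that the new compact printout belongs to. It assumes JLD2 is installed; the file name `mymodel.jld2` is only illustrative, not part of the patch.

```julia
using Flux, JLD2

struct MyModel; net; end            # same toy model as in the docs example
Flux.@layer MyModel
MyModel() = MyModel(Chain(Dense(10 => 5, relu), Dense(5 => 2)))

model = MyModel()
model_state = Flux.state(model)     # nested NamedTuple of arrays, no code

jldsave("mymodel.jld2"; model_state)        # save only the state, not the struct

model2 = MyModel()                          # rebuild the architecture in code
Flux.loadmodel!(model2, JLD2.load("mymodel.jld2", "model_state"))
```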