diff --git a/src/functor.jl b/src/functor.jl
index f09ac6ae93..e0168edf6b 100644
--- a/src/functor.jl
+++ b/src/functor.jl
@@ -64,12 +64,12 @@ Possible values of `inactive` are:
 """
 function testmode!(m, mode)
   inactive = if mode isa Symbol
-    mode === :auto || throw(ArgumentError("testmode! accepts only the symbol :auto, got :$mode"))
+    mode === :auto || throw(ArgumentError(lazy"testmode! accepts only the symbol :auto, got :$mode"))
     nothing
   elseif mode isa Union{Bool,Nothing}
     mode
   else
-    throw(ArgumentError("testmode! does not accept $(repr(mode)) as the 2nd argument"))
+    throw(ArgumentError(lazy"testmode! does not accept $(repr(mode)) as the 2nd argument"))
   end
   foreach(x -> testmode!(x, inactive), trainable(m))
   m
diff --git a/src/layers/basic.jl b/src/layers/basic.jl
index 018b19b31d..ac85827a41 100644
--- a/src/layers/basic.jl
+++ b/src/layers/basic.jl
@@ -75,7 +75,7 @@ function Base.show(io::IO, c::Chain)
 end
 
 _show_layers(io, layers::Tuple) = join(io, layers, ", ")
-_show_layers(io, layers::NamedTuple) = join(io, ["$k = $v" for (k, v) in pairs(layers)], ", ")
+_show_layers(io, layers::NamedTuple) = join(io, [lazy"$k = $v" for (k, v) in pairs(layers)], ", ")
 _show_layers(io, layers::AbstractVector) = (print(io, "["); join(io, layers, ", "); print(io, "]"))
 
 # This is a temporary and naive implementation
@@ -531,7 +531,7 @@ function _parallel_check(layers, xs)
   nl = length(layers)
   nx = length(xs)
   if (nl != nx)
-    throw(ArgumentError("Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
+    throw(ArgumentError(lazy"Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
   end
 end
 ChainRulesCore.@non_differentiable _parallel_check(nl, nx)
@@ -616,7 +616,7 @@ function _pairwise_check(x, layers, T)
   lx = length(x)
   N = length(layers)
   if T <: Tuple && lx != N
-    throw(ArgumentError("PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))
+    throw(ArgumentError(lazy"PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))
   end
 end
 ChainRulesCore.@non_differentiable _pairwise_check(lx, N, T)
diff --git a/src/layers/macro.jl b/src/layers/macro.jl
index ab6f6ae867..867bbbdcc2 100644
--- a/src/layers/macro.jl
+++ b/src/layers/macro.jl
@@ -70,7 +70,7 @@ macro layer(exs...)
 
   for j in 1:length(rest)
     ex = rest[j]
-    Meta.isexpr(ex, :(=)) || error("The macro `@layer` expects here `keyword = (fields...,)`, got $ex")
+    Meta.isexpr(ex, :(=)) || error(lazy"The macro `@layer` expects here `keyword = (fields...,)`, got $ex")
 
     name = if ex.args[1] == :trainable
       :(Optimisers.trainable)
@@ -153,4 +153,4 @@ _macro_trainable(type, fun, field::Union{Symbol,QuoteNode}) = _macro_trainable(t
 
 _noquotenode(s::Symbol) = s
 _noquotenode(q::QuoteNode) = q.value # lets you write trainable=(:x,:y) instead of (x,y)
-_noquotenode(ex) = error("expected a symbol here, as a field name, but got $ex")
+_noquotenode(ex) = error("expected a symbol here, as a field name, but got ", ex)
diff --git a/src/layers/normalise.jl b/src/layers/normalise.jl
index c0a86c8796..c6663cca88 100644
--- a/src/layers/normalise.jl
+++ b/src/layers/normalise.jl
@@ -4,7 +4,7 @@ _isactive(m, x) = isnothing(m.active) ? NNlib.within_gradient(x) : m.active
 # Internal function, used only in this file.
 _tidy_active(mode::Bool) = mode
 _tidy_active(::Nothing) = nothing
-_tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError("active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))
+_tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError(lazy"active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))
 
 """
     Dropout(p; [dims, rng, active])
@@ -74,7 +74,7 @@ end
 Dropout(p::Real, dims, active) = Dropout(p, dims, active, default_rng())
 
 function Dropout(p::Real; dims=:, active::Union{Bool,Nothing} = nothing, rng = default_rng())
-  0 ≤ p ≤ 1 || throw(ArgumentError("Dropout expects 0 ≤ p ≤ 1, got p = $p"))
+  0 ≤ p ≤ 1 || throw(ArgumentError(lazy"Dropout expects 0 ≤ p ≤ 1, got p = $p"))
   Dropout(p, dims, active, rng)
 end
 
@@ -126,7 +126,7 @@ end
 AlphaDropout(p, active) = AlphaDropout(p, active, default_rng())
 
 function AlphaDropout(p; rng = default_rng(), active::Union{Bool,Nothing} = nothing)
-  0 ≤ p ≤ 1 || throw(ArgumentError("AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
+  0 ≤ p ≤ 1 || throw(ArgumentError(lazy"AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
   AlphaDropout(p, active, rng)
 end
 
diff --git a/src/optimise/train.jl b/src/optimise/train.jl
index 883a7210c4..111207d479 100644
--- a/src/optimise/train.jl
+++ b/src/optimise/train.jl
@@ -92,7 +92,7 @@ function train!(loss, ps::Params, data, opt::AbstractOptimiser; cb = () -> ())
       loss(batchmemaybe(d)...)
     end
     if !isfinite(l)
-      throw(DomainError("Loss is $l on data item $i, stopping training"))
+      throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
     end
     update!(opt, ps, gs)
     cb()
diff --git a/src/outputsize.jl b/src/outputsize.jl
index 60384e144f..5d6132d059 100644
--- a/src/outputsize.jl
+++ b/src/outputsize.jl
@@ -230,7 +230,7 @@ end
 function _makelazy(ex::Expr)
   n = _underscoredepth(ex)
   n == 0 && return ex
-  n == 1 && error("@autosize doesn't expect an underscore here: $ex")
+  n == 1 && error("@autosize doesn't expect an underscore here: ", ex)
   n == 2 && return :($LazyLayer($(string(ex)), $(_makefun(ex)), nothing))
   n > 2 && return Expr(ex.head, map(_makelazy, ex.args)...)
 end
diff --git a/src/train.jl b/src/train.jl
index 246aef0c55..fd21e53f17 100644
--- a/src/train.jl
+++ b/src/train.jl
@@ -106,7 +106,7 @@ function train!(loss, model, data, opt; cb = nothing)
     d_splat = d isa Tuple ? d : (d,)
     l, gs = Zygote.withgradient(m -> loss(m, d_splat...), model)
     if !isfinite(l)
-      throw(DomainError("Loss is $l on data item $i, stopping training"))
+      throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
    end
    opt, model = Optimisers.update!(opt, model, gs[1])
    @logprogress Base.haslength(data) ? i/length(data) : nothing
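
Context note, not part of the patch: `lazy"..."` is Base's `@lazy_str` macro (Julia 1.8+). It returns a `Base.LazyString`, which captures the interpolated values but only builds the final message when it is displayed; the usual motivation is to keep the type-unstable `string(...)` machinery out of the compiled error path, reducing method invalidations and load-time latency. A minimal sketch of the pattern, using a hypothetical `check_probability` helper that does not exist in the Flux sources:

    # Hypothetical illustration of the lazy-string pattern applied in the patch.
    # The LazyString is cheap to construct; the full message is only
    # materialized when the ArgumentError is actually printed.
    check_probability(p::Real) =
        0 ≤ p ≤ 1 ? p : throw(ArgumentError(lazy"expected 0 ≤ p ≤ 1, got p = $p"))

    check_probability(0.3)   # returns 0.3
    check_probability(1.5)   # throws; interpolation happens at display time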