More lazy strings
lassepe committed Mar 18, 2024
1 parent a8737d7 commit 4b5f1ff
Showing 7 changed files with 13 additions and 13 deletions.
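Most of the hunks below follow one pattern: an interpolated error message switches from an eager "..." string to lazy"..." (`Base.LazyString`, Julia 1.8+), so the interpolation, and the `string` specializations it would otherwise pull into the calling function, are deferred until the message is actually displayed. A minimal sketch of the pattern, using a hypothetical `check_probability` helper in place of the real call sites:

    # Hypothetical helper, assuming Julia >= 1.8 for the lazy"..." string macro.
    # lazy"..." builds a Base.LazyString: the interpolation runs only if the
    # message is shown, which keeps the error path cheap and avoids compiling
    # string(::T) methods (and their invalidation risk) into the caller.
    function check_probability(p::Real)
        0 ≤ p ≤ 1 || throw(ArgumentError(lazy"expected 0 ≤ p ≤ 1, got p = $p"))
        return p
    end

    check_probability(0.5)    # fine; the message is never materialized
    # check_probability(1.5)  # ArgumentError: expected 0 ≤ p ≤ 1, got p = 1.5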
4 changes: 2 additions & 2 deletions src/functor.jl
@@ -64,12 +64,12 @@ Possible values of `inactive` are:
"""
function testmode!(m, mode)
inactive = if mode isa Symbol
- mode === :auto || throw(ArgumentError("testmode! accepts only the symbol :auto, got :$mode"))
+ mode === :auto || throw(ArgumentError(lazy"testmode! accepts only the symbol :auto, got :$mode"))
nothing
elseif mode isa Union{Bool,Nothing}
mode
else
throw(ArgumentError("testmode! does not accept $(repr(mode)) as the 2nd argument"))
throw(ArgumentError(lazy"testmode! does not accept $(repr(mode)) as the 2nd argument"))
end
foreach(x -> testmode!(x, inactive), trainable(m))
m
6 changes: 3 additions & 3 deletions src/layers/basic.jl
@@ -75,7 +75,7 @@ function Base.show(io::IO, c::Chain)
end

_show_layers(io, layers::Tuple) = join(io, layers, ", ")
- _show_layers(io, layers::NamedTuple) = join(io, ["$k = $v" for (k, v) in pairs(layers)], ", ")
+ _show_layers(io, layers::NamedTuple) = join(io, [lazy"$k = $v" for (k, v) in pairs(layers)], ", ")
_show_layers(io, layers::AbstractVector) = (print(io, "["); join(io, layers, ", "); print(io, "]"))

# This is a temporary and naive implementation
@@ -531,7 +531,7 @@ function _parallel_check(layers, xs)
nl = length(layers)
nx = length(xs)
if (nl != nx)
throw(ArgumentError("Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
throw(ArgumentError(lazy"Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
end
end
ChainRulesCore.@non_differentiable _parallel_check(nl, nx)
@@ -616,7 +616,7 @@ function _pairwise_check(x, layers, T)
lx = length(x)
N = length(layers)
if T <: Tuple && lx != N
throw(ArgumentError("PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))
throw(ArgumentError(lazy"PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))
end
end
ChainRulesCore.@non_differentiable _pairwise_check(lx, N, T)
4 changes: 2 additions & 2 deletions src/layers/macro.jl
@@ -70,7 +70,7 @@ macro layer(exs...)

for j in 1:length(rest)
ex = rest[j]
- Meta.isexpr(ex, :(=)) || error("The macro `@layer` expects here `keyword = (fields...,)`, got $ex")
+ Meta.isexpr(ex, :(=)) || error(lazy"The macro `@layer` expects here `keyword = (fields...,)`, got $ex")

name = if ex.args[1] == :trainable
:(Optimisers.trainable)
@@ -153,4 +153,4 @@ _macro_trainable(type, fun, field::Union{Symbol,QuoteNode}) = _macro_trainable(t

_noquotenode(s::Symbol) = s
_noquotenode(q::QuoteNode) = q.value # lets you write trainable=(:x,:y) instead of (x,y)
- _noquotenode(ex) = error("expected a symbol here, as a field name, but got $ex")
+ _noquotenode(ex) = error("expected a symbol here, as a field name, but got ", ex)
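Two call sites in this commit, this `_noquotenode` method and the `@autosize` check in src/outputsize.jl, take a different route: the pieces are passed to `error` as separate arguments, and `error` builds the message with `string(args...)` only at the moment the exception is raised, so the caller again contains no interpolation. A rough sketch of that form, with a hypothetical `expect_symbol` helper:

    # Hypothetical sketch of the multi-argument `error` form: the arguments are
    # joined by `string` inside `error`, and only on the error path.
    expect_symbol(s::Symbol) = s
    expect_symbol(ex) = error("expected a symbol here, as a field name, but got ", ex)

    expect_symbol(:weight)   # returns :weight
    # expect_symbol(42)      # ErrorException("expected a symbol here, as a field name, but got 42")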
6 changes: 3 additions & 3 deletions src/layers/normalise.jl
@@ -4,7 +4,7 @@ _isactive(m, x) = isnothing(m.active) ? NNlib.within_gradient(x) : m.active
# Internal function, used only in this file.
_tidy_active(mode::Bool) = mode
_tidy_active(::Nothing) = nothing
- _tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError("active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))
+ _tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError(lazy"active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))

"""
Dropout(p; [dims, rng, active])
@@ -74,7 +74,7 @@ end
Dropout(p::Real, dims, active) = Dropout(p, dims, active, default_rng())

function Dropout(p::Real; dims=:, active::Union{Bool,Nothing} = nothing, rng = default_rng())
- 0 ≤ p ≤ 1 || throw(ArgumentError("Dropout expects 0 ≤ p ≤ 1, got p = $p"))
+ 0 ≤ p ≤ 1 || throw(ArgumentError(lazy"Dropout expects 0 ≤ p ≤ 1, got p = $p"))
Dropout(p, dims, active, rng)
end

@@ -126,7 +126,7 @@ end

AlphaDropout(p, active) = AlphaDropout(p, active, default_rng())
function AlphaDropout(p; rng = default_rng(), active::Union{Bool,Nothing} = nothing)
- 0 ≤ p ≤ 1 || throw(ArgumentError("AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
+ 0 ≤ p ≤ 1 || throw(ArgumentError(lazy"AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
AlphaDropout(p, active, rng)
end

2 changes: 1 addition & 1 deletion src/optimise/train.jl
@@ -92,7 +92,7 @@ function train!(loss, ps::Params, data, opt::AbstractOptimiser; cb = () -> ())
loss(batchmemaybe(d)...)
end
if !isfinite(l)
throw(DomainError("Loss is $l on data item $i, stopping training"))
throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
end
update!(opt, ps, gs)
cb()
2 changes: 1 addition & 1 deletion src/outputsize.jl
@@ -230,7 +230,7 @@ end
function _makelazy(ex::Expr)
n = _underscoredepth(ex)
n == 0 && return ex
- n == 1 && error("@autosize doesn't expect an underscore here: $ex")
+ n == 1 && error("@autosize doesn't expect an underscore here: ", ex)
n == 2 && return :($LazyLayer($(string(ex)), $(_makefun(ex)), nothing))
n > 2 && return Expr(ex.head, map(_makelazy, ex.args)...)
end
2 changes: 1 addition & 1 deletion src/train.jl
@@ -106,7 +106,7 @@ function train!(loss, model, data, opt; cb = nothing)
d_splat = d isa Tuple ? d : (d,)
l, gs = Zygote.withgradient(m -> loss(m, d_splat...), model)
if !isfinite(l)
throw(DomainError("Loss is $l on data item $i, stopping training"))
throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
end
opt, model = Optimisers.update!(opt, model, gs[1])
@logprogress Base.haslength(data) ? i/length(data) : nothing
