From 65c37c178b9a38f3a5debfdeedabecdb33fadaf3 Mon Sep 17 00:00:00 2001
From: Carlo Lucibello
Date: Wed, 2 Dec 2020 09:39:18 +0100
Subject: [PATCH] remove typewarn

---
 docs/src/performance.md | 16 ++++++++--------
 src/layers/basic.jl     |  4 ----
 2 files changed, 8 insertions(+), 12 deletions(-)

diff --git a/docs/src/performance.md b/docs/src/performance.md
index c78ff1a115..36eea32e95 100644
--- a/docs/src/performance.md
+++ b/docs/src/performance.md
@@ -13,7 +13,6 @@ not because the operations are faster, but because the memory usage is halved.
 Which means allocations occur much faster.
 And you use less memory.
 
-
 ## Preserve inputs' types
 
 Not only should your activation and loss functions be [type-stable](https://docs.julialang.org/en/v1/manual/performance-tips/#Write-%22type-stable%22-functions-1),
@@ -21,8 +20,8 @@ they should also preserve the type of their inputs.
 
 A very artificial example using an activation function like
 
-```
- my_tanh(x) = Float64(tanh(x))
+```julia
+my_tanh(x) = Float64(tanh(x))
 ```
 
 will result in performance on `Float32` input orders of magnitude slower than the normal `tanh` would,
@@ -35,20 +34,21 @@ you will see a large slow-down.
 This can occur sneakily, because you can cause type-promotion by interacting with a numeric literals.
 E.g. the following will have run into the same problem as above:
 
-```
- leaky_tanh(x) = 0.01*x + tanh(x)
+```julia
+leaky_tanh(x) = 0.01*x + tanh(x)
 ```
 
 While one could change the activation function (e.g. to use `0.01f0*x`), the idiomatic (and safe way) to avoid type casts whenever inputs changes is to use `oftype`:
 
-```
- leaky_tanh(x) = oftype(x/1, 0.01)*x + tanh(x)
-```
+```julia
+leaky_tanh(x) = oftype(x/1, 0.01)*x + tanh(x)
+```
 
 ## Evaluate batches as Matrices of features
 
 While it can sometimes be tempting to process your observations (feature vectors) one at a time
 e.g.
+
 ```julia
 function loss_total(xs::AbstractVector{<:Vector}, ys::AbstractVector{<:Vector})
   sum(zip(xs, ys)) do (x, y_target)
diff --git a/src/layers/basic.jl b/src/layers/basic.jl
index 8805935d10..4acddf5243 100644
--- a/src/layers/basic.jl
+++ b/src/layers/basic.jl
@@ -120,7 +120,6 @@ end
 @functor Dense
 
 function (a::Dense)(x::AbstractVecOrMat)
-  eltype(a.W) == eltype(x) || _dense_typewarn(a, x)
   W, b, σ = a.W, a.b, a.σ
   # reshape to handle dims > 1 as batch dimensions
   sz = size(x)
@@ -129,9 +128,6 @@ function (a::Dense)(x::AbstractVecOrMat)
   return reshape(x, :, sz[2:end]...)
 end
 
-_dense_typewarn(d, x) = @warn "Element types don't match for layer $d, this will be slow." typeof(d.W) typeof(x) maxlog=1
-Zygote.@nograd _dense_typewarn
-
 function Base.show(io::IO, l::Dense)
   print(io, "Dense(", size(l.W, 2), ", ", size(l.W, 1))
   l.σ == identity || print(io, ", ", l.σ)
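
A rough sketch of the type-preservation point the `performance.md` hunks above make, in plain Julia; the `bad_`/`good_` names are illustrative, not from the patch:

```julia
# A Float64 literal silently promotes Float32 input; the oftype idiom
# converts the literal to the input's own floating-point type instead.
bad_leaky_tanh(x)  = 0.01 * x + tanh(x)
good_leaky_tanh(x) = oftype(x / 1, 0.01) * x + tanh(x)

x = 0.5f0                         # a Float32 input
@show typeof(bad_leaky_tanh(x))   # Float64 -- promoted by the 0.01 literal
@show typeof(good_leaky_tanh(x))  # Float32 -- input type preserved
```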
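
The truncated `loss_total` hunk argues for batched evaluation; a minimal sketch of that idea, with `m` standing in for a hypothetical model:

```julia
# Stack feature vectors column-wise so a layer does one matrix
# multiplication instead of sixteen separate vector ones.
xs = [rand(Float32, 10) for _ in 1:16]  # 16 observations, 10 features each
X  = reduce(hcat, xs)                   # 10×16 Matrix{Float32}
# ŷ = m(X)                              # hypothetical model: one batched call
```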
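
For context on the `src/layers/basic.jl` hunks, a sketch of the code path they change, assuming a Flux version of this era (where `Dense` stores its weights in the `W` field):

```julia
using Flux

m = Dense(10, 5)       # weights default to Float32
x = rand(Float64, 10)  # eltype(m.W) != eltype(x)
y = m(x)               # used to emit _dense_typewarn once; after this
                       # patch the mixed-type multiply runs silently
@show eltype(y)        # Float64 -- W*x promoted to the wider type
```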