diff --git a/src/activation.jl b/src/activation.jl
index 909208a57..a97b572da 100644
--- a/src/activation.jl
+++ b/src/activation.jl
@@ -28,12 +28,12 @@ You can also specify the coefficient explicitly, e.g. `leakyrelu(x, 0.01)`.
 leakyrelu(x, a = oftype(x, 0.01)) = max(a*x, x)
 
 """
-    elu(x; α = 1) = max(α * (exp(x) - one(x)), x)
+    elu(x, α = 1) = x > 0 ? x : α * (exp(x) - one(x))
 
 Exponential Linear Unit activation function.
 See [Fast and Accurate Deep Network Learning by Exponential Linear Units](https://arxiv.org/abs/1511.07289)
 """
-elu(x, α = one(x)) = max(α * (exp(x) - one(x)), x)
+elu(x, α = one(x)) = ifelse(x ≥ 0, x, α * (exp(x) - one(x)))
 
 """
     swish(x) = x * σ(x)
diff --git a/test/activation.jl b/test/activation.jl
index daeb29809..00af421f9 100644
--- a/test/activation.jl
+++ b/test/activation.jl
@@ -13,7 +13,7 @@ xs = rand(5,5)
   @test elu(-4) ≈ (exp(-4) - 1)
 end
 
-@test relu( 0.4,0.3) ≈ 0.4
-@test relu(-0.4,0.3) ≈ -0.12
+@test leakyrelu( 0.4,0.3) ≈ 0.4
+@test leakyrelu(-0.4,0.3) ≈ -0.12
 
 end
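
For reviewers, a quick sanity check of the new branch-based definition (a minimal sketch that restates the patched line from src/activation.jl above; the sample inputs are illustrative only):

# Corrected ELU: identity for non-negative x, α * (exp(x) - 1) otherwise.
elu(x, α = one(x)) = ifelse(x ≥ 0, x, α * (exp(x) - one(x)))

elu(2.0)        # 2.0 — passes positive inputs through unchanged
elu(-4.0)       # ≈ exp(-4) - 1 ≈ -0.9817, matching the test in test/activation.jl
elu(-4.0, 0.5)  # ≈ 0.5 * (exp(-4) - 1), scaled by the α argument

The previous max(α * (exp(x) - one(x)), x) form only coincides with ELU for x ≤ 0, since exp(x) - 1 > x when x > 0, so the branch (or ifelse) is needed here.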