Skip to content

Commit

Permalink
test fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
MikeInnes committed Oct 18, 2017
1 parent fb13bd3 commit e4b48c1
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions src/activation.jl
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,12 @@ You can also specify the coefficient explicitly, e.g. `leakyrelu(x, 0.01)`.
# Leaky ReLU: identity for positive `x`, slope `a` for non-positive `x`.
# Implemented via `max` — equivalent to the branching form when `a < 1`,
# which holds for the documented coefficient (default 0.01).
function leakyrelu(x, a = oftype(x, 0.01))
    damped = a * x
    return max(damped, x)
end

"""
elu(x; α = 1) = max(α * (exp(x) - one(x)), x)
elu(x, α = one(x)) = ifelse(x > 0, x, α * (exp(x) - one(x)))
Exponential Linear Unit activation function. See [Fast and Accurate Deep Network
Learning by Exponential Linear Units](https://arxiv.org/abs/1511.07289)
"""
elu(x, α = one(x)) = max(α * (exp(x) - one(x)), x)
# Exponential Linear Unit (Clevert et al., https://arxiv.org/abs/1511.07289):
# identity for x > 0, `α * (exp(x) - 1)` otherwise. `ifelse` keeps the
# computation branch-free; `one(x)` keeps it type-generic/type-stable.
# Fix: the comparison had lost its operator (`x 0`) — restored `x > 0`.
elu(x, α = one(x)) = ifelse(x > 0, x, α * (exp(x) - one(x)))

"""
swish(x) = x * σ(x)
Expand Down
4 changes: 2 additions & 2 deletions test/activation.jl
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ xs = rand(5,5)
@test elu(-4) ≈ (exp(-4) - 1)
end

@test relu( 0.4,0.3) ≈ 0.4
@test relu(-0.4,0.3) ≈ -0.12
@test leakyrelu( 0.4,0.3) ≈ 0.4
@test leakyrelu(-0.4,0.3) ≈ -0.12

end

0 comments on commit e4b48c1

Please sign in to comment.