Skip to content

Commit

Permalink
Merge #406
Browse files Browse the repository at this point in the history
406: Add dims kwargs and improve gradient computation for logsumexp r=MikeInnes a=xukai92



Co-authored-by: Kai Xu <[email protected]>
  • Loading branch information
bors[bot] and xukai92 authored Dec 9, 2019
2 parents b72c79a + f4b7133 commit 9c03dd0
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 6 deletions.
9 changes: 3 additions & 6 deletions src/lib/statsfuns.jl
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,7 @@ end
return log1pexp(x), Δ -> (Δ * (x < 9f0 ? logistic(x) : x < 16f0 ? 1 - exp(-x) : 1),)
end

@adjoint function logsumexp(X::AbstractArray{<:Real})
return logsumexp(X), function(Δ)
y = StatsFuns.softmax(X)
y .*= Δ
return (y,)
end
# Custom adjoint for `StatsFuns.logsumexp` supporting the `dims` keyword.
#
# The gradient of logsumexp is softmax: d/dX logsumexp(X) = exp.(X .- logsumexp(X)).
# Reusing the primal result `lse` in the pullback avoids recomputing the
# (numerically stabilized) softmax from scratch.
@adjoint function logsumexp(X::AbstractArray{<:Real}; dims=:)
    lse = logsumexp(X; dims=dims)
    # Pullback: broadcast handles both `dims=:` (scalar `lse`) and reductions
    # along specific dimensions (where `lse` keeps singleton dims).
    return lse, Δ -> (Δ .* exp.(X .- lse),)
end
1 change: 1 addition & 0 deletions test/gradcheck.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1039,6 +1039,7 @@ end
@test gradtest(StatsFuns.logsumexp, randn(rng, 1, 1))
@test gradtest(StatsFuns.logsumexp, randn(rng, 3))
@test gradtest(StatsFuns.logsumexp, randn(rng, 3, 4, 5))
@test gradtest(x -> sum(StatsFuns.logsumexp(x; dims=1)), randn(rng, 4, 4))
end
end

Expand Down

0 comments on commit 9c03dd0

Please sign in to comment.