diff --git a/Project.toml b/Project.toml
index 5f7b61b17..08a6e6de6 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "Lux"
 uuid = "b2108857-7c20-44ae-9111-449ecde12c47"
 authors = ["Avik Pal and contributors"]
-version = "1.2.1"
+version = "1.2.2"
 
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
diff --git a/src/extended_ops.jl b/src/extended_ops.jl
index ce9f66215..fafe000f2 100644
--- a/src/extended_ops.jl
+++ b/src/extended_ops.jl
@@ -118,6 +118,7 @@ function ∇eachslice(Δ′, x::AbstractArray, ::Val{dims}) where {dims}
     idx = findfirst(Base.Fix2(isa, AbstractArray), Δs)
     idx === nothing && return zero.(x)
     Δ = similar(x)
+    fill!(Δ, false)
     for i in axes(x, dims)
         Δᵢ = selectdim(Δ, dims, i)
         copyto!(Δᵢ, Δs[i])
diff --git a/src/helpers/losses.jl b/src/helpers/losses.jl
index 1af021e4c..493e1b357 100644
--- a/src/helpers/losses.jl
+++ b/src/helpers/losses.jl
@@ -317,8 +317,10 @@ function unsafe_apply_loss(loss::BinaryFocalLoss, ŷ, y)
 end
 
 @doc doc"""
-    CrossEntropyLoss(; agg=mean, epsilon=nothing, dims=1,
-        label_smoothing::Union{Nothing, Real}=nothing)
+    CrossEntropyLoss(;
+        agg=mean, epsilon=nothing, dims=1, logits::Union{Bool, Val}=Val(false),
+        label_smoothing::Union{Nothing, Real}=nothing
+    )
 
 Return the cross entropy loss which is used in multi-class classification tasks. The input,
 $\hat{y}$, is expected to be normalized (i.e. `softmax` output) if `logits` is `false` or
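
The `fill!(Δ, false)` line added to `∇eachslice` guards against `similar(x)` returning an uninitialized buffer: any entries not explicitly overwritten afterwards would otherwise carry arbitrary memory contents into the gradient. A minimal standalone sketch of that hazard (variable names here are illustrative, not taken from the source):

```julia
x = rand(Float32, 3, 4)

Δ_bad  = similar(x)               # uninitialized: may contain arbitrary values
Δ_good = fill!(similar(x), false) # `false` is a strong zero; promotes to zero(eltype(x))

@assert all(iszero, Δ_good)       # always holds; the same check on Δ_bad is not guaranteed
```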
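
The signature block in the docstring now lists the existing `logits` keyword of `CrossEntropyLoss`. A hedged usage sketch, assuming `CrossEntropyLoss` is exported by Lux and taking `softmax` from NNlib, with hand-built one-hot targets for illustration:

```julia
using Lux
using NNlib: softmax

ŷ = randn(Float32, 3, 4)                 # raw scores, one column per sample
y = Float32[1 0 0 1; 0 1 0 0; 0 0 1 0]   # one-hot targets, 3 classes × 4 samples

# With logits=Val(true) the loss normalizes internally, so it should agree
# (up to floating-point differences) with passing softmax output explicitly:
CrossEntropyLoss(; logits=Val(true))(ŷ, y) ≈ CrossEntropyLoss()(softmax(ŷ), y)
```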