Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add more losses #20

Merged
merged 14 commits into from
Oct 23, 2023
7 changes: 7 additions & 0 deletions src/GCPDecompositions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,13 @@ export AbstractLoss,
NonnegativeLeastSquaresLoss,
PoissonLoss,
PoissonLogLoss,
GammaLoss,
RayleighLoss,
BernoulliOddsLoss,
BernoulliLogitLoss,
NegativeBinomialOddsLoss,
HuberLoss,
BetaDivergenceLoss,
UserDefinedLoss
export GCPConstraints

Expand Down
179 changes: 178 additions & 1 deletion src/type-losses.jl
Original file line number Diff line number Diff line change
Expand Up @@ -90,8 +90,185 @@ value(::PoissonLogLoss, x, m) = exp(m) - x * m
deriv(::PoissonLogLoss, x, m) = exp(m) - x
domain(::PoissonLogLoss) = Interval(-Inf, +Inf)

"""
    GammaLoss(eps::Real = 1e-10)

Loss corresponding to a statistical assumption of Gamma-distributed data `X`
with scale given by the low-rank model tensor `M`.

- **Distribution:** ``x_i \\sim \\operatorname{Gamma}(k, \\sigma_i)``
- **Link function:** ``m_i = k \\sigma_i``
- **Loss function:** ``f(x,m) = \\frac{x}{m + \\epsilon} + \\log(m + \\epsilon)``
- **Domain:** ``m \\in [0, \\infty)``
"""
struct GammaLoss{T<:Real} <: AbstractLoss
    eps::T
    function GammaLoss{T}(eps::T) where {T<:Real}
        eps >= zero(eps) ||
            throw(DomainError(eps, "Gamma loss requires nonnegative `eps`"))
        return new(eps)
    end
end
GammaLoss(eps::T = 1e-10) where {T<:Real} = GammaLoss{T}(eps)
# `eps` shifts `m` away from zero so the division and logarithm stay finite
# on the closed domain boundary.
value(loss::GammaLoss, x, m) = x / (m + loss.eps) + log(m + loss.eps)
deriv(loss::GammaLoss, x, m) = inv(m + loss.eps) - x / (m + loss.eps)^2
domain(::GammaLoss) = Interval(0.0, +Inf)

"""
RayleighLoss(eps::Real = 1e-10)

Loss corresponding to the statistical assumption of Rayleigh data `X`
with sacle given by the low-rank model tensor `M`

- **Distribution:** ``x_i \\sim \\operatorname{Rayleigh}(\\theta_i)``
- **Link function:** ``m_i = \\sqrt{\\frac{\\pi}{2}\\theta_i}``
- **Loss function:** ``f(x, m) = 2\\log(m + \\epsilon) + \\frac{\\pi}{4}(\\frac{x}{m + \\epsilon})^2``
- **Domain:** ``m \\in [0, \\infty)``
"""
struct RayleighLoss{T<:Real} <: AbstractLoss
eps::T
RayleighLoss{T}(eps::T) where {T<:Real} =
eps >= zero(eps) ? new(eps) :
throw(DomainError(eps, "Rayleigh loss requires nonnegative `eps`"))
end
RayleighLoss(eps::T = 1e-10) where {T<:Real} = RayleighLoss{T}(eps)
value(loss::RayleighLoss, x, m) = 2*log(m + loss.eps) + (pi / 4) * ((x/(m + loss.eps))^2)
deriv(loss::RayleighLoss, x, m) = 2/(m + loss.eps) - (pi / 2) * (x^2 / (m + loss.eps)^3)
domain(::RayleighLoss) = Interval(0.0, +Inf)

"""
BernoulliOddsLoss(eps::Real = 1e-10)

Loss corresponding to the statistical assumption of Bernouli data `X`
with odds-sucess rate given by the low-rank model tensor `M`

- **Distribution:** ``x_i \\sim \\operatorname{Bernouli}(\\rho_i)``
- **Link function:** ``m_i = \\frac{\\rho_i}{1 - \\rho_i}``
- **Loss function:** ``f(x, m) = \\log(m + 1) - x\\log(m + \\epsilon)``
- **Domain:** ``m \\in [0, \\infty)``
"""
struct BernoulliOddsLoss{T<:Real} <: AbstractLoss
eps::T
BernoulliOddsLoss{T}(eps::T) where {T<:Real} =
eps >= zero(eps) ? new(eps) :
throw(DomainError(eps, "BernoulliOddsLoss requires nonnegative `eps`"))
end
BernoulliOddsLoss(eps::T = 1e-10) where {T<:Real} = BernoulliOddsLoss{T}(eps)
value(loss::BernoulliOddsLoss, x, m) = log(m + 1) - x * log(m + loss.eps)
deriv(loss::BernoulliOddsLoss, x, m) = 1 / (m + 1) - (x / (m + loss.eps))
domain(::BernoulliOddsLoss) = Interval(0.0, +Inf)


"""
BernoulliLogitLoss(eps::Real = 1e-10)

Loss corresponding to the statistical assumption of Bernouli data `X`
with log odds-success rate given by the low-rank model tensor `M`

- **Distribution:** ``x_i \\sim \\operatorname{Bernouli}(\\rho_i)``
- **Link function:** ``m_i = \\log(\\frac{\\rho_i}{1 - \\rho_i})``
- **Loss function:** ``f(x, m) = \\log(1 + e^m) - xm``
- **Domain:** ``m \\in \\mathbb{R}``
"""
struct BernoulliLogitLoss{T<:Real} <: AbstractLoss
eps::T
BernoulliLogitLoss{T}(eps::T) where {T<:Real} =
eps >= zero(eps) ? new(eps) :
throw(DomainError(eps, "BernoulliLogitsLoss requires nonnegative `eps`"))
end
BernoulliLogitLoss(eps::T = 1e-10) where {T<:Real} = BernoulliLogitLoss{T}(eps)
value(::BernoulliLogitLoss, x, m) = log(1 + exp(m)) - x * m
deriv(::BernoulliLogitLoss, x, m) = exp(m) / (1 + exp(m)) - x
domain(::BernoulliLogitLoss) = Interval(-Inf, +Inf)


"""
NegativeBinomialOddsLoss(r::Integer, eps::Real = 1e-10)

Loss corresponding to the statistical assumption of Negative Binomial
data `X` with log odds failure rate given by the low-rank model tensor `M`

- **Distribution:** ``x_i \\sim \\operatorname{NegativeBinomial}(r, \\rho_i) ``
- **Link function:** ``m = \\frac{\\rho}{1 - \\rho}``
- **Loss function:** ``f(x, m) = (r + x) \\log(1 + m) - x\\log(m + \\epsilon) ``
- **Domain:** ``m \\in [0, \\infty)``
"""
struct NegativeBinomialOddsLoss{S<:Integer, T<:Real} <: AbstractLoss
r::S
eps::T
function NegativeBinomialOddsLoss{S, T}(r::S, eps::T) where {S<: Integer, T<:Real}
eps >= zero(eps) ||
throw(DomainError(eps, "NegativeBinomialOddsLoss requires nonnegative `eps`"))
r >= zero(r) ||
throw(DomainError(r, "NegativeBinomialOddsLoss requires nonnegative `r`"))
new(r, eps)
end
end
NegativeBinomialOddsLoss(r::S, eps::T = 1e-10) where {S<:Integer, T<:Real} = NegativeBinomialOddsLoss{S, T}(r, eps)
value(loss::NegativeBinomialOddsLoss, x, m) = (loss.r + x) * log(1 + m) - x * log(m + loss.eps)
deriv(loss::NegativeBinomialOddsLoss, x, m) = (loss.r + x) / (1 + m) - x / (m + loss.eps)
domain(::NegativeBinomialOddsLoss) = Interval(0.0, +Inf)


"""
HuberLoss(Δ::Real)

Huber Loss for given Δ

- **Loss function:** ``f(x, m) = (x - m)^2 if \\abs(x - m)\\leq\\Delta, 2\\Delta\\abs(x - m) - \\Delta^2 otherwise``
- **Domain:** ``m \\in \\mathbb{R}``
"""
struct HuberLoss{T<:Real} <: AbstractLoss
Δ::T
HuberLoss{T}(Δ::T) where {T<:Real} =
Δ >= zero(Δ) ? new(Δ) :
throw(DomainError(Δ, "HuberLoss requires nonnegative `Δ`"))
end
HuberLoss(Δ::T) where {T<:Real} = HuberLoss{T}(Δ)
value(loss::HuberLoss, x, m) = abs(x - m) <= loss.Δ ? (x - m)^2 : 2 *loss. Δ * abs(x - m) - loss.Δ^2
deriv(loss::HuberLoss, x, m) = abs(x - m) <= loss.Δ ? -2 * (x - m) : -2 * sign(x - m) * loss.Δ * x
domain(::HuberLoss) = Interval(-Inf, +Inf)


"""
BetaDivergenceLoss(β::Real, eps::Real)

BetaDivergence Loss for given β

- **Loss function:** ``f(x, m; β) = \\frac{1}{\\beta}m^{\\beta} - \\frac{1}{\\beta - 1}xm^{\\beta - 1}
if \\beta \\in \\mathbb{R} \\{0, 1\\},
m - x\\log(m) if \\beta = 1,
\\frac{x}{m} + \\log(m) if \\beta = 0``
- **Domain:** ``m \\in [0, \\infty)``
"""
struct BetaDivergenceLoss{S<:Real, T<:Real} <: AbstractLoss
β::T
eps::T
BetaDivergenceLoss{S, T}(β::S, eps::T) where {S<:Real, T<:Real} =
eps >= zero(eps) ? new(β, eps) :
throw(DomainError(eps, "BetaDivergenceLoss requires nonnegative `eps`"))
end
BetaDivergenceLoss(β::S, eps::T = 1e-10) where {S<:Real, T<:Real} = BetaDivergenceLoss{S, T}(β, eps)
function value(loss::BetaDivergenceLoss, x, m)
if loss.β == 0
return x / (m + loss.eps) + log(m + loss.eps)
elseif loss.β == 1
return m - x * log(m + loss.eps)
else
return 1 / loss.β * m^loss.β - 1 / (loss.β - 1) * x * m^(loss.β - 1)
end
end
function deriv(loss::BetaDivergenceLoss, x, m)
if loss.β == 0
return -x / (m + loss.eps)^2 + 1 / (m + loss.eps)
elseif loss.β == 1
return 1 - x / (m + loss.eps)
else
return m^(loss.β - 1) - x * m^(loss.β - 2)
end
end
domain(::BetaDivergenceLoss) = Interval(0.0, +Inf)


# User-defined loss
"""
UserDefinedLoss

Expand Down
Loading
Loading