
Commit

Drop Loss from loss functions
Context is already provided by the module.
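In practice this is just a rename at call sites: each loss type drops its `Loss` suffix because the `GCPLosses` module name already supplies that context. A minimal before/after sketch (the data tensor and rank here are illustrative, not part of the commit):

```julia
using GCPDecompositions

X = randn(10, 20, 30)  # illustrative data tensor

# Before this commit:
# M = gcp(X, 2; loss = GCPLosses.LeastSquaresLoss())

# After this commit:
M = gcp(X, 2; loss = GCPLosses.LeastSquares())
```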
dahong67 committed Jun 14, 2024
1 parent 1d9a07b commit fa2a3ff
Showing 10 changed files with 153 additions and 160 deletions.
8 changes: 4 additions & 4 deletions benchmark/suites/gcp.jl
@@ -11,7 +11,7 @@ for sz in [(15, 20, 25), (30, 40, 50)], r in 1:2
M = CPD(ones(r), rand.(sz, r))
X = [M[I] for I in CartesianIndices(size(M))]
SUITE["least-squares-size(X)=$sz, rank(X)=$r"] =
- @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquaresLoss())
+ @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquares())
end

# Benchmark Poisson loss
@@ -20,7 +20,7 @@ for sz in [(15, 20, 25), (30, 40, 50)], r in 1:2
M = CPD(fill(10.0, r), rand.(sz, r))
X = [rand(Poisson(M[I])) for I in CartesianIndices(size(M))]
SUITE["poisson-size(X)=$sz, rank(X)=$r"] =
- @benchmarkable gcp($X, $r; loss = GCPLosses.PoissonLoss())
+ @benchmarkable gcp($X, $r; loss = GCPLosses.Poisson())
end

# Benchmark Gamma loss
@@ -30,7 +30,7 @@ for sz in [(15, 20, 25), (30, 40, 50)], r in 1:2
k = 1.5
X = [rand(Gamma(k, M[I] / k)) for I in CartesianIndices(size(M))]
SUITE["gamma-size(X)=$sz, rank(X)=$r"] =
- @benchmarkable gcp($X, $r; loss = GCPLosses.GammaLoss())
+ @benchmarkable gcp($X, $r; loss = GCPLosses.Gamma())
end

# Benchmark BernoulliOdds Loss
@@ -39,7 +39,7 @@ for sz in [(15, 20, 25), (30, 40, 50)], r in 1:2
M = CPD(ones(r), rand.(sz, r))
X = [rand(Bernoulli(M[I] / (M[I] + 1))) for I in CartesianIndices(size(M))]
SUITE["bernoulliOdds-size(X)=$sz, rank(X)=$r"] =
- @benchmarkable gcp($X, $r; loss = GCPLosses.BernoulliOddsLoss())
+ @benchmarkable gcp($X, $r; loss = GCPLosses.BernoulliOdds())
end

end
6 changes: 3 additions & 3 deletions benchmark/suites/leastsquares.jl
@@ -13,7 +13,7 @@ for sz in [(15, 20, 25), (30, 40, 50), (60, 70, 80)], r in [1, 10, 50]
M = CPD(ones(r), rand.(sz, r))
X = [M[I] for I in CartesianIndices(size(M))]
SUITE["size(X)=$sz, rank(X)=$r"] =
- @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquaresLoss())
+ @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquares())
end

# Order-4 tensors
@@ -22,7 +22,7 @@ for sz in [(15, 20, 25, 30), (30, 40, 50, 60)], r in [1, 10, 50]
M = CPD(ones(r), rand.(sz, r))
X = [M[I] for I in CartesianIndices(size(M))]
SUITE["least-squares-size(X)=$sz, rank(X)=$r"] =
- @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquaresLoss())
+ @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquares())
end

# Order-5 tensors
@@ -31,7 +31,7 @@ for sz in [(15, 20, 25, 30, 35), (30, 30, 30, 30, 30)], r in [1, 10, 50]
M = CPD(ones(r), rand.(sz, r))
X = [M[I] for I in CartesianIndices(size(M))]
SUITE["least-squares-size(X)=$sz, rank(X)=$r"] =
- @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquaresLoss())
+ @benchmarkable gcp($X, $r; loss = GCPLosses.LeastSquares())
end

end
2 changes: 1 addition & 1 deletion docs/src/quickstart.md
@@ -143,7 +143,7 @@ For example,
to try non-negative least-squares simply run

```@repl quickstart
- M_nonneg = gcp(X, 1; loss = GCPLosses.NonnegativeLeastSquaresLoss())
+ M_nonneg = gcp(X, 1; loss = GCPLosses.NonnegativeLeastSquares())
```

!!! tip "Congratulations!"
14 changes: 5 additions & 9 deletions src/GCPDecompositions.jl
@@ -30,7 +30,7 @@ end

"""
gcp(X::Array, r;
- loss = GCPLosses.LeastSquaresLoss(),
+ loss = GCPLosses.LeastSquares(),
constraints = default_constraints(loss),
algorithm = default_algorithm(X, r, loss, constraints),
init = default_init(X, r, loss, constraints, algorithm))
@@ -42,7 +42,7 @@ Keyword arguments:
+ `constraints` : a `Tuple` of constraints on the factor matrices `U = (U[1],...,U[N])`.
+ `algorithm` : algorithm to use
- Conventional CP corresponds to the default `GCPLosses.LeastSquaresLoss()` loss
+ Conventional CP corresponds to the default `GCPLosses.LeastSquares()` loss
with the default of no constraints (i.e., `constraints = ()`).
If the LossFunctions.jl package is also loaded,
@@ -55,7 +55,7 @@ See also: `CPD`, `GCPLosses`, `GCPConstraints`, `GCPAlgorithms`.
gcp(
X::Array,
r;
- loss = GCPLosses.LeastSquaresLoss(),
+ loss = GCPLosses.LeastSquares(),
constraints = default_constraints(loss),
algorithm = default_algorithm(X, r, loss, constraints),
init = default_init(X, r, loss, constraints, algorithm),
@@ -91,12 +91,8 @@ loss function `loss`, and tuple of constraints `constraints`.
See also: `gcp`.
"""
- default_algorithm(
-     X::Array{<:Real},
-     r,
-     loss::GCPLosses.LeastSquaresLoss,
-     constraints::Tuple{},
- ) = GCPAlgorithms.FastALS()
+ default_algorithm(X::Array{<:Real}, r, loss::GCPLosses.LeastSquares, constraints::Tuple{}) =
+     GCPAlgorithms.FastALS()
default_algorithm(X, r, loss, constraints) = GCPAlgorithms.LBFGSB()
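For orientation, a sketch of how the default algorithm dispatch above resolves after the rename (the tensor and rank are made-up inputs, and `default_algorithm` is called through the module in case it is not exported):

```julia
using GCPDecompositions

X = randn(15, 20, 25)

# Unconstrained least-squares matches the specialized method and gets FastALS.
GCPDecompositions.default_algorithm(X, 2, GCPLosses.LeastSquares(), ())
# expected: GCPAlgorithms.FastALS()

# Any other loss (or constraint set) falls through to the generic L-BFGS-B fallback.
GCPDecompositions.default_algorithm(X, 2, GCPLosses.Poisson(), ())
# expected: GCPAlgorithms.LBFGSB()
```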

"""
4 changes: 2 additions & 2 deletions src/gcp-algorithms/als.jl
@@ -4,7 +4,7 @@
ALS
**A**lternating **L**east **S**quares.
- Workhorse algorithm for `LeastSquaresLoss` with no constraints.
+ Workhorse algorithm for `LeastSquares` loss with no constraints.
Algorithm parameters:
@@ -17,7 +17,7 @@ end
function _gcp(
X::Array{TX,N},
r,
- loss::GCPLosses.LeastSquaresLoss,
+ loss::GCPLosses.LeastSquares,
constraints::Tuple{},
algorithm::GCPAlgorithms.ALS,
init,
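If the plain ALS workhorse is wanted instead of the FastALS default, it can be requested via the `algorithm` keyword. A hypothetical call, assuming the `ALS` parameters all have defaults so the no-argument constructor works:

```julia
# Hypothetical: select the ALS algorithm explicitly for an unconstrained least-squares fit.
M_als = gcp(X, 2; loss = GCPLosses.LeastSquares(), algorithm = GCPDecompositions.GCPAlgorithms.ALS())
```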
2 changes: 1 addition & 1 deletion src/gcp-algorithms/fastals.jl
@@ -24,7 +24,7 @@ end
function _gcp(
X::Array{TX,N},
r,
- loss::GCPLosses.LeastSquaresLoss,
+ loss::GCPLosses.LeastSquares,
constraints::Tuple{},
algorithm::GCPAlgorithms.FastALS,
init,
(Diffs for the remaining changed files are not shown.)
