Skip to content

Commit

Permalink
Merge pull request #215 from FluxML/dev
Browse files · Browse the repository at this point in the history
For a 0.2.9 release
  • Branch information
ablaom authored Sep 22, 2022
2 parents ac253f1 + 452c09d commit 0602655
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "MLJFlux"
uuid = "094fc8d1-fd35-5302-93ea-dabda2abf845"
authors = ["Anthony D. Blaom <[email protected]>", "Ayush Shridhar <[email protected]>"]
version = "0.2.8"
version = "0.2.9"

[deps]
CategoricalArrays = "324d7699-5711-5eae-9e2f-1d82baa6b597"
Expand Down
4 changes: 2 additions & 2 deletions src/core.jl
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ function train!(loss, penalty, chain, optimiser, X, y)
parameters = Flux.params(chain)
gs = Flux.gradient(parameters) do
yhat = chain(X[i])
batch_loss = loss(yhat, y[i]) + penalty(parameters)
batch_loss = loss(yhat, y[i]) + penalty(parameters)/n_batches
training_loss += batch_loss
return batch_loss
end
Expand Down Expand Up @@ -96,7 +96,7 @@ function fit!(loss, penalty, chain, optimiser, epochs, verbosity, X, y)

parameters = Flux.params(chain)
losses = (loss(chain(X[i]), y[i]) +
penalty(parameters) for i in 1:n_batches)
penalty(parameters)/n_batches for i in 1:n_batches)
history = [mean(losses),]

for i in 1:epochs
Expand Down

0 comments on commit 0602655

Please sign in to comment.