diff --git a/docs/src/extended_examples/MNIST/notebook.jl b/docs/src/extended_examples/MNIST/notebook.jl index 448f50ee..810fae5f 100644 --- a/docs/src/extended_examples/MNIST/notebook.jl +++ b/docs/src/extended_examples/MNIST/notebook.jl @@ -3,12 +3,18 @@ # This tutorial is available as a Jupyter notebook or julia script # [here](https://github.com/FluxML/MLJFlux.jl/tree/dev/docs/src/extended_examples/MNIST). -using Pkg #!md -const DIR = @__DIR__ #!md -Pkg.activate(DIR) #!md -Pkg.instantiate() #!md +# The following code block assumes the current directory contains `Manifest.toml` and +# `Project.toml` files tested for this demonstration, available +# [here](https://github.com/FluxML/MLJFlux.jl/tree/dev/docs/src/extended_examples/MNIST). +# Otherwise, you can try running `using Pkg; Pkg.activate(temp=true)` instead, and +# manually add the relevant packages to the temporary environment created. + +using Pkg +const DIR = @__DIR__ +Pkg.activate(DIR) +Pkg.instantiate() -# **Julia version** is assumed to be 1.10.* +# **Julia version** is assumed to be ≥ 1.10 using MLJ using Flux diff --git a/docs/src/extended_examples/spam_detection/notebook.jl b/docs/src/extended_examples/spam_detection/notebook.jl index 3d712ebf..e855a1a6 100644 --- a/docs/src/extended_examples/spam_detection/notebook.jl +++ b/docs/src/extended_examples/spam_detection/notebook.jl @@ -10,9 +10,15 @@ # **Warning.** This demo includes some non-idiomatic use of MLJ to allow use of the # Flux.jl `Embedding` layer. It is not recommended for MLJ beginners. -using Pkg #!md -Pkg.activate(@__DIR__); #!md -Pkg.instantiate(); #!md +# The following code block assumes the current directory contains `Manifest.toml` and +# `Project.toml` files tested for this demonstration, available +# [here](https://github.com/FluxML/MLJFlux.jl/tree/dev/docs/src/extended_examples/spam_detection).
+# Otherwise, you can try running `using Pkg; Pkg.activate(temp=true)` instead, and +# manually add the relevant packages to the temporary environment created. + +using Pkg +Pkg.activate(@__DIR__); +Pkg.instantiate(); # ### Basic Imports using MLJ diff --git a/src/types.jl b/src/types.jl index e7bb880d..2715836a 100644 --- a/src/types.jl +++ b/src/types.jl @@ -1190,16 +1190,14 @@ With the learning rate fixed, we can now compute a CV estimate of the performanc all data bound to `mach`) and compare this with performance on the test set: ```julia -# custom MLJ loss: -multi_loss(yhat, y) = l2(MLJ.matrix(yhat), MLJ.matrix(y)) # CV estimate, based on `(X, y)`: -evaluate!(mach, resampling=CV(nfolds=5), measure=multi_loss) +evaluate!(mach, resampling=CV(nfolds=5), measure=multitarget_l2) # loss for `(Xtest, test)`: fit!(mach) # trains on all data `(X, y)` yhat = predict(mach, Xtest) -multi_loss(yhat, ytest) +multitarget_l2(yhat, ytest) ``` See also