From 3946a68e8d30614315b5b5c3353e29716e8efc4d Mon Sep 17 00:00:00 2001 From: Essam Date: Wed, 22 May 2024 17:31:07 +0300 Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20Landing=20page=20and=20style=20chan?= =?UTF-8?q?ges?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/Manifest.toml | 2 +- docs/Project.toml | 1 + docs/make.jl | 5 ++ docs/src/assets/light.scss | 13 ++-- docs/src/assets/themes/documenter-light.css | 8 ++- docs/src/index.md | 72 ++++++++++++++++++++- 6 files changed, 92 insertions(+), 9 deletions(-) diff --git a/docs/Manifest.toml b/docs/Manifest.toml index bf9c20ea..d9bc7bb7 100644 --- a/docs/Manifest.toml +++ b/docs/Manifest.toml @@ -2,7 +2,7 @@ julia_version = "1.10.0" manifest_format = "2.0" -project_hash = "8e66a79f19bfabe2e689a52672b8d43a78056f03" +project_hash = "8237dd01902c50351547fc838fcc6b6ea3cfb2cb" [[deps.ANSIColoredPrinters]] git-tree-sha1 = "574baf8110975760d391c710b6341da1afa48d8c" diff --git a/docs/Project.toml b/docs/Project.toml index f03f510a..af94c237 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,4 +1,5 @@ [deps] Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" DocumenterTools = "35a29f4d-8980-5a13-9543-d66fff28ecb8" +Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" MLJFlux = "094fc8d1-fd35-5302-93ea-dabda2abf845" diff --git a/docs/make.jl b/docs/make.jl index b189e8af..b1fd3ca6 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,5 +1,6 @@ using Documenter using MLJFlux +using Flux DocMeta.setdocmeta!(MLJFlux, :DocTestSetup, :(using MLJFlux); recursive=true) @@ -12,6 +13,10 @@ makedocs( "https://fonts.googleapis.com/css2?family=Lato:ital,wght@0,100;0,300;0,400;0,700;0,900;1,100;1,300;1,400;1,700;1,900&family=Montserrat:ital,wght@0,100..900;1,100..900&display=swap", class = :css, ), + asset( + "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.4/css/all.min.css", + class = :css, + ) ], repolink="https://github.com/FluxML/MLJFlux.jl" ), diff --git 
a/docs/src/assets/light.scss b/docs/src/assets/light.scss index f793aea2..c9315294 100644 --- a/docs/src/assets/light.scss +++ b/docs/src/assets/light.scss @@ -1,6 +1,6 @@ @charset "UTF-8"; -$family-sans-serif: 'Montserrat', sans-serif; +$family-sans-serif: 'Lato', sans-serif; $family-monospace: 'Source Code Pro', monospace; $themename: "documenter-light"; // CSS file must be called `$(themename).css` @@ -37,12 +37,15 @@ $themename: "documenter-light"; // CSS file must be called `$(themename).css` .admonition { border-radius: 15px !important; } - - -code.nohighlight.hljs { - background-color: white !important; +.admonition-header { + border-top-left-radius: 14px !important; + border-top-right-radius: 14px !important; } +// code.nohighlight.hljs { +// background-color: white !important; +// } + .grid { diff --git a/docs/src/assets/themes/documenter-light.css b/docs/src/assets/themes/documenter-light.css index 48953353..a6489b46 100644 --- a/docs/src/assets/themes/documenter-light.css +++ b/docs/src/assets/themes/documenter-light.css @@ -10919,6 +10919,10 @@ pre:hover .copy-button { margin-right: 0.75rem; content: "\f06a"; } +.admonition-header { + border-top-left-radius: 14px !important; + border-top-right-radius: 14px !important; +} .admonition-body { color: #222; padding: 0.5rem 0.75rem; @@ -11659,9 +11663,9 @@ code.hljs { .admonition { border-radius: 15px !important; } -code.nohighlight.hljs { +/* code.nohighlight.hljs { background-color: white !important; -} +} */ .grid { display: grid; grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); diff --git a/docs/src/index.md b/docs/src/index.md index ac01c985..34d6171a 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -1,3 +1,73 @@ # MLJFlux.jl -Documentation for MLJFlux.jl +A Julia package integrating deep learning Flux models with MLJ. 
+ +### Objectives + +- Provide a user-friendly and high-level interface to fundamental [Flux](https://fluxml.ai/Flux.jl/stable/) deep learning models while still being extensible by supporting custom models written with Flux + +- Make building deep learning models more convenient for users already familiar with the MLJ workflow + +- Make it easier to apply machine learning techniques provided by MLJ, including: out-of-sample performance evaluation, hyper-parameter optimization, iteration control, and more, to deep learning models + +!!! note "MLJFlux Coverage" + MLJFlux support is focused on fundamental and widely used deep learning models; sophisticated architectures or techniques such as online learning, reinforcement learning, and adversarial networks are currently beyond its scope. + +Also note that MLJFlux is limited to training models only when all training data fits into memory, though it still supports automatic batching of data. + +### Installation + +```julia +import Pkg +Pkg.activate("my_environment", shared=true) +Pkg.add(["MLJ", "MLJFlux", "Flux"]) +``` +You only need `Flux` if you need to build a custom architecture or experiment with different optimizers, loss functions and activations. + +### Quick Start +First load and instantiate the model: +```@example +using MLJ, Flux, MLJFlux +import RDatasets + +# 1. Load Data +iris = RDatasets.dataset("datasets", "iris"); +y, X = unpack(iris, ==(:Species), colname -> true, rng=123); + +# 2. Load and instantiate model +NeuralNetworkClassifier = @load NeuralNetworkClassifier pkg="MLJFlux" +clf = NeuralNetworkClassifier( + builder=MLJFlux.MLP(; hidden=(5,4), σ=Flux.relu), + optimiser=Flux.ADAM(0.01), + batch_size=8, + epochs=100, + acceleration=CUDALibs() + ) + +# 3. Wrap it in a machine and fit +mach = machine(clf, X, y) +fit!(mach) + +# 4. 
Evaluate the model +cv=CV(nfolds=5) +evaluate!(mach, resampling=cv, measure=accuracy) +``` +As you can see, we were able to use MLJ functionality (i.e., cross-validation) with a Flux deep learning model. All arguments provided also have defaults. + +Notice that we were also able to define the neural network in a high-level fashion by only specifying the number of neurons in each hidden layer and the activation function. Meanwhile, `MLJFlux` was able to infer the input and output layers as well as use a suitable default for the loss function and output activation given the classification task. + +### Flux or MLJFlux? +[Flux](https://fluxml.ai/Flux.jl/stable/) is a deep learning framework in Julia that comes with everything you need to build deep learning models (i.e., GPU support, automatic differentiation, layers, activations, losses, optimizers, etc.). [MLJFlux](https://github.com/FluxML/MLJFlux.jl) wraps models built with Flux, providing a more high-level interface for building and training such models. More importantly, it empowers Flux models by extending their support to many common machine learning workflows that are possible via MLJ, such as: + +- **Estimating performance** of your model using a holdout set or other resampling strategy (e.g., cross-validation) as measured by one or more metrics (e.g., loss functions) that may not have been used in training + +- **Optimizing hyper-parameters** such as a regularization parameter (e.g., dropout) or the width/height/number of channels of a convolution layer + +- **Composing with other models** such as introducing data pre-processing steps (e.g., missing data imputation) into a pipeline. It might make sense to include non-deep learning models in this pipeline. Other kinds of model composition could include blending predictions of a deep learner with some other kind of model (as in “model stacking”). Models composed with MLJ can also be tuned as a single unit. 
+ +- **Controlling iteration** by adding an early stopping criterion based on an out-of-sample estimate of the loss, dynamically changing the learning rate (e.g., cyclic learning rates), periodically saving snapshots of the model, or generating live plots of sample weights to judge training progress (as in TensorBoard) + + +- **Comparing** your model with non-deep learning models + +Thus, for a model that could be implemented in both `Flux` and `MLJFlux`, one could choose to work with `MLJFlux` instead of `Flux` if they are interested in any of the functionality above, while not willing to implement it from scratch and/or when they would prefer working with a higher-level interface equivalent to that of MLJ for their task. \ No newline at end of file