Commit: documentation update (#2422)
* update docs

* update docs

* update docs

* fixes for mlp tutorial

* compat

* blitz update and model zoo page
CarloLucibello authored Apr 17, 2024
1 parent 90a3dfe · commit d7a3bb5
Showing 40 changed files with 449 additions and 612 deletions.
Project.toml: 9 changes (4 additions & 5 deletions)

@@ -7,6 +7,8 @@ Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
 Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
+ImageCore = "a09fc81d-aa75-5fe9-8630-4744c3626534"
+ImageShow = "4e3cecfd-b093-5904-9786-8bbb286a6a31"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"
 MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
@@ -48,7 +50,7 @@ MacroTools = "0.5"
 Metal = "0.5, 1"
 NNlib = "0.9.1"
 OneHotArrays = "0.2.4"
-Optimisers = "0.3.2"
+Optimisers = "0.3.3"
 Preferences = "1"
 ProgressLogging = "0.1"
 Reexport = "1.0"
@@ -77,7 +79,4 @@ Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
 cuDNN = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd"

 [targets]
-test = ["Test", "Documenter", "IterTools", "LinearAlgebra", "FillArrays",
-    "ComponentArrays", "BSON", "Pkg", "CUDA", "cuDNN", "Metal", "AMDGPU",
-    "Enzyme", "FiniteDifferences", "Tracker"]
-
+test = ["Test", "Documenter", "IterTools", "LinearAlgebra", "FillArrays", "ComponentArrays", "BSON", "Pkg", "CUDA", "cuDNN", "Metal", "AMDGPU", "Enzyme", "FiniteDifferences", "Tracker"]
docs/Project.toml: 2 changes (1 addition & 1 deletion)

@@ -16,4 +16,4 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

 [compat]
-Documenter = "0.27"
+Documenter = "1.3"
docs/make.jl: 69 changes (30 additions & 39 deletions)

@@ -13,48 +13,48 @@ makedocs(
         # You could read this end-to-end, or skip to what you need.
         # Aim is to cover each new concept exactly once (but not list all variants).
         # Hard to invent further divisions which aren't more confusing than helpful?
-        "Quick Start" => "models/quickstart.md",
-        "Fitting a Line" => "models/overview.md",
-        "Gradients and Layers" => "models/basics.md",
-        "Training" => "training/training.md",
-        "Recurrence" => "models/recurrence.md",
-        "GPU Support" => "gpu.md",
-        "Saving & Loading" => "saving.md",
-        "Performance Tips" => "performance.md",
+        "Quick Start" => "guide/models/quickstart.md",
+        "Fitting a Line" => "guide/models/overview.md",
+        "Gradients and Layers" => "guide/models/basics.md",
+        "Custom Layers" => "guide/models/custom_layers.md",
+        "Training" => "guide/training/training.md",
+        "Recurrence" => "guide/models/recurrence.md",
+        "GPU Support" => "guide/gpu.md",
+        "Saving & Loading" => "guide/saving.md",
+        "Performance Tips" => "guide/performance.md",
     ],
     "Ecosystem" => "ecosystem.md",
     "Reference" => [
         # This essentially collects docstrings, with a bit of introduction.
-        "Built-in Layers" => "models/layers.md",
-        "Activation Functions" => "models/activation.md",
-        "Weight Initialisation" => "utilities.md",
-        "Loss Functions" => "models/losses.md",
-        "Training API" => "training/reference.md",
-        "Optimisation Rules" => "training/optimisers.md",
-        "Shape Inference" => "outputsize.md",
-        "Flat vs. Nested" => "destructure.md",
-        "Callback Helpers" => "training/callbacks.md",
-        "Gradients -- Zygote.jl" => "training/zygote.md",
-        "Batching Data -- MLUtils.jl" => "data/mlutils.md",
-        "OneHotArrays.jl" => "data/onehot.md",
-        "Low-level Operations -- NNlib.jl" => "models/nnlib.md",
-        "Nested Structures -- Functors.jl" => "models/functors.md",
+        "Built-in Layers" => "reference/models/layers.md",
+        "Activation Functions" => "reference/models/activation.md",
+        "Weight Initialisation" => "reference/utilities.md",
+        "Loss Functions" => "reference/models/losses.md",
+        "Training API" => "reference/training/reference.md",
+        "Optimisation Rules" => "reference/training/optimisers.md",
+        "Shape Inference" => "reference/outputsize.md",
+        "Flat vs. Nested" => "reference/destructure.md",
+        "Callback Helpers" => "reference/training/callbacks.md",
+        "Gradients -- Zygote.jl" => "reference/training/zygote.md",
+        "Batching Data -- MLUtils.jl" => "reference/data/mlutils.md",
+        "OneHotArrays.jl" => "reference/data/onehot.md",
+        "Low-level Operations -- NNlib.jl" => "reference/models/nnlib.md",
+        "Nested Structures -- Functors.jl" => "reference/models/functors.md",
     ],
     "Tutorials" => [
         # These walk you through various tasks. It's fine if they overlap quite a lot.
         # All the website tutorials can move here, perhaps much of the model zoo too?
         # Or perhaps those should just be trashed, model zoo versions are newer & more useful.
         "Linear Regression" => "tutorials/linear_regression.md",
         "Logistic Regression" => "tutorials/logistic_regression.md",
+        "Model Zoo" => "tutorials/model_zoo.md",
         #=
-        "Julia & Flux: 60 Minute Blitz" => "tutorials/2020-09-15-deep-learning-flux.md",
-        "Multi-layer Perceptron" => "tutorials/2021-01-26-mlp.md",
+        # "Multi-layer Perceptron" => "tutorials/mlp.md",
+        # "Julia & Flux: 60 Minute Blitz" => "tutorials/blitz.md",
         "Simple ConvNet" => "tutorials/2021-02-07-convnet.md",
         "Generative Adversarial Net" => "tutorials/2021-10-14-vanilla-gan.md",
         "Deep Convolutional GAN" => "tutorials/2021-10-08-dcgan-mnist.md",
         =#
-        # Not really sure where this belongs... some in Fluxperimental, aim to delete?
-        "Custom Layers" => "models/advanced.md", # TODO move freezing to Training
     ],
 ],
 format = Documenter.HTML(
@@ -63,19 +63,10 @@
     assets = ["assets/flux.css"],
     prettyurls = get(ENV, "CI", nothing) == "true"
 ),
-doctest = false,
-# linkcheck = true,
-checkdocs = :exports,
-# strict = true,
-# strict = [
-#     :cross_references,
-#     :missing_docs,
-#     :doctest,
-#     :linkcheck,
-#     :parse_error,
-#     :example_block,
-#     :autodocs_block, :docs_block, :eval_block, :example_block, :footnote, :meta_block, :setup_block
-# ],
+doctest = false, # done later
+checkdocs = :none, # :exports # Do not check if all functions appear in the docs
+                   # since it considers all packages
+warnonly = [:cross_references]
 )

doctest(Flux) # only test Flux modules
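Note (not part of the diff): the new settings skip doctests during the page build and run them afterwards via `doctest(Flux)`, so only Flux's own docstrings are tested rather than those of every package in the docs environment. A minimal sketch of that split pattern, assuming the usual `DocMeta` setup (not shown in this hunk):

```julia
using Documenter, Flux

# Make `using Flux` implicit in every Flux docstring example.
DocMeta.setdocmeta!(Flux, :DocTestSetup, :(using Flux); recursive = true)

# Build the pages without running doctests ...
makedocs(sitename = "Flux", doctest = false, checkdocs = :none,
         warnonly = [:cross_references])

# ... then run doctests for Flux's own modules only.
doctest(Flux)
```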
File renamed without changes.
Deep Convolutional GAN tutorial, tutorials/2021-10-08-dcgan-mnist.md (file name inferred from the navigation entries above):

@@ -109,7 +109,7 @@ dcgan_init(shape...) = randn(Float32, shape) * 0.02f0
```julia
 function Generator(latent_dim)
     Chain(
-        Dense(latent_dim, 7*7*256, bias=false),
+        Dense(latent_dim => 7*7*256, bias=false),
         BatchNorm(7*7*256, relu),

         x -> reshape(x, 7, 7, 256, :),
# … (diff truncated here)
```
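Note (not part of the diff): `dcgan_init` draws weights from a zero-mean normal with standard deviation 0.02, the initialisation recommended by the DCGAN paper (Radford et al., 2015). A hedged sketch of how such a function plugs into a layer through Flux's `init` keyword; the tutorial's exact call sites are outside this hunk:

```julia
using Flux

# Weights ~ N(0, 0.02²), as in the DCGAN paper.
dcgan_init(shape...) = randn(Float32, shape) * 0.02f0

# Layer constructors such as Dense and ConvTranspose accept it via `init`.
layer = Dense(100 => 7*7*256, bias=false, init=dcgan_init)
```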
Generative Adversarial Net tutorial, tutorials/2021-10-14-vanilla-gan.md (file name inferred from the navigation entries above):

@@ -96,13 +96,13 @@ calling the model in a gradient context. As a final non-linearity, we use the
`sigmoid` activation function.

```julia
-discriminator = Chain(Dense(n_features, 1024, x -> leakyrelu(x, 0.2f0)),
+discriminator = Chain(Dense(n_features => 1024, x -> leakyrelu(x, 0.2f0)),
                       Dropout(0.3),
-                      Dense(1024, 512, x -> leakyrelu(x, 0.2f0)),
+                      Dense(1024 => 512, x -> leakyrelu(x, 0.2f0)),
                       Dropout(0.3),
-                      Dense(512, 256, x -> leakyrelu(x, 0.2f0)),
+                      Dense(512 => 256, x -> leakyrelu(x, 0.2f0)),
                       Dropout(0.3),
-                      Dense(256, 1, sigmoid)) |> gpu
+                      Dense(256 => 1, sigmoid)) |> gpu
```
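Note (not part of the diff): these edits only switch `Dense` to the `Pair` constructor; both forms build identical layers. A quick sketch checking the equivalence, with illustrative sizes not taken from the tutorial:

```julia
using Flux

d_old = Dense(784, 1024, relu)     # older positional form, still accepted
d_new = Dense(784 => 1024, relu)   # Pair form used throughout this update

size(d_old.weight) == size(d_new.weight)   # true: both weights are 1024×784
```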

Let's define the generator in a similar fashion. This network maps a latent
@@ -113,9 +113,9 @@ the training data onto.

```julia
 generator = Chain(Dense(latent_dim, 256, x -> leakyrelu(x, 0.2f0)),
-                  Dense(256, 512, x -> leakyrelu(x, 0.2f0)),
-                  Dense(512, 1024, x -> leakyrelu(x, 0.2f0)),
-                  Dense(1024, n_features, tanh)) |> gpu
+                  Dense(256 => 512, x -> leakyrelu(x, 0.2f0)),
+                  Dense(512 => 1024, x -> leakyrelu(x, 0.2f0)),
+                  Dense(1024 => n_features, tanh)) |> gpu
```
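A hedged smoke test for the updated pair of models (not part of the diff), assuming the tutorial's `latent_dim = 100` and `n_features = 28*28`, which are defined outside this hunk; `gpu` falls back to the CPU when no GPU is available:

```julia
noise = randn(Float32, latent_dim, 16) |> gpu   # batch of 16 latent vectors

fake  = generator(noise)       # n_features×16 matrix; tanh keeps values in (-1, 1)
score = discriminator(fake)    # 1×16 matrix; sigmoid keeps values in (0, 1)
```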


