diff --git a/dev/.documenter-siteinfo.json b/dev/.documenter-siteinfo.json index 5d87b3b7a76..1549781c3d7 100644 --- a/dev/.documenter-siteinfo.json +++ b/dev/.documenter-siteinfo.json @@ -1 +1 @@ -{"documenter":{"julia_version":"1.9.3","generation_timestamp":"2023-10-20T10:24:03","documenter_version":"1.1.1"}} \ No newline at end of file +{"documenter":{"julia_version":"1.9.3","generation_timestamp":"2023-10-20T16:30:17","documenter_version":"1.1.1"}} \ No newline at end of file diff --git a/dev/assets/Manifest.toml b/dev/assets/Manifest.toml index e7ca0cf6e4b..8861487957a 100644 --- a/dev/assets/Manifest.toml +++ b/dev/assets/Manifest.toml @@ -2469,9 +2469,9 @@ version = "0.3.6" [[deps.SciMLSensitivity]] deps = ["ADTypes", "Adapt", "ArrayInterface", "ChainRulesCore", "DiffEqBase", "DiffEqCallbacks", "DiffEqNoiseProcess", "Distributions", "EllipsisNotation", "Enzyme", "FiniteDiff", "ForwardDiff", "FunctionProperties", "FunctionWrappersWrappers", "Functors", "GPUArraysCore", "LinearAlgebra", "LinearSolve", "Markdown", "OrdinaryDiffEq", "Parameters", "PreallocationTools", "QuadGK", "Random", "RandomNumbers", "RecursiveArrayTools", "Reexport", "ReverseDiff", "SciMLBase", "SciMLOperators", "SimpleNonlinearSolve", "SparseDiffTools", "StaticArraysCore", "Statistics", "StochasticDiffEq", "Tracker", "TruncatedStacktraces", "Zygote", "ZygoteRules"] -git-tree-sha1 = "c6ae601b93ce2adf39a5bc6c690b4776d3ab0b71" +git-tree-sha1 = "28bf869bd2b91f19deff93c9e70553e5d50eb5ff" uuid = "1ed8b502-d754-442c-8d5d-10ac956f44a1" -version = "7.44.0" +version = "7.45.0" [[deps.ScientificTypesBase]] git-tree-sha1 = "a8e18eb383b5ecf1b5e6fc237eb39255044fd92b" @@ -2792,9 +2792,9 @@ version = "1.0.1" [[deps.Tables]] deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits"] -git-tree-sha1 = "a1f34829d5ac0ef499f6d84428bd6b4c71f02ead" +git-tree-sha1 = "cb76cf677714c095e535e3501ac7954732aeea2d" uuid = 
"bd369af6-aec1-5ad0-b16a-f7cc5008161c" -version = "1.11.0" +version = "1.11.1" [[deps.Tar]] deps = ["ArgTools", "SHA"] diff --git a/dev/comparisons/cppfortran/index.html b/dev/comparisons/cppfortran/index.html index 5d6b66a8e36..8e2237ec5e4 100644 --- a/dev/comparisons/cppfortran/index.html +++ b/dev/comparisons/cppfortran/index.html @@ -1,2 +1,2 @@ -Getting Started with Julia's SciML for the C++/Fortran User · Overview of Julia's SciML

Getting Started with Julia's SciML for the C++/Fortran User

You don't need help if you're a Fortran guru. I'm just kidding, you're not a Lisp developer. If you're coming from C++ or Fortran, you may be familiar with high-performance computing environments similar to SciML, such as PETSc, Trilinos, or Sundials. The following are some points to help the transition.

Why SciML? High-Level Workflow Reasons

If you're coming from “hardcore” C++/Fortran computing environments, some things to check out with Julia's SciML are:

  • Interactivity - use the interactive REPL to easily investigate numerical details.
  • Metaprogramming performance tools - tools like LoopVectorization.jl can be used to generate faster code than even some of the most hand-optimized C++/Fortran code. Current benchmarks show this SIMD-optimized Julia code outperforming OpenBLAS and MKL BLAS implementations in many performance regimes.
  • Symbolic modeling languages - writing models by hand can leave a lot of performance on the table. Using high-level modeling tools like ModelingToolkit can automate symbolic simplifications, which improve the stability and performance of numerical solvers. On complex models, even the best handwritten C++/Fortran code is orders of magnitude behind the code that symbolic tearing algorithms can achieve!
  • Composable Library Components - In C++/Fortran environments, every package feels like a silo. Arrays made for PETSc cannot easily be used in Trilinos, and converting Sundials NVector outputs to DataFrames for post-simulation data processing is a process itself. The Julia SciML environment embraces interoperability. Don't wait for SciML to do it: by using generic coding with JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • Wrappers to the Libraries You Know and Trust - Moving to SciML does not have to be a quick transition. SciML has extensive wrappers to many widely-used classical solver environments such as SUNDIALS and Hairer's classic Fortran ODE solvers (dopri5, dop853, etc.). Using these wrapped solvers is painless and can be swapped in for the Julia versions with one line of code. This gives you a way to incrementally adopt new features/methods while retaining the older pieces you know and trust.
  • Don't Start from Scratch - SciML builds on the extensive Base library of Julia, and thus grows and improves with every update to the language. With hundreds of monthly contributors to SciML and hundreds of monthly contributors to Julia, SciML is one of the most actively developed open-source scientific computing ecosystems out there!
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, Sundials/Hairer in purple/red represent C++/Fortran's most commonly used solvers:

Why SciML? Some Technical Details

Let's face the facts, in the open benchmarks the pure-Julia solvers tend to outperform the classic “best” C++ and Fortran solvers in almost every example (with a few notable exceptions). But why?

The answer is two-fold: Julia is as fast as C++/Fortran, and the algorithms are what matter.

Julia is as Fast as C++/Fortran

While Julia code looks high level like Python or MATLAB, its performance is on par with C++ and Fortran. At a technical level, when Julia code is type-stable, i.e. that the types that are returned from a function are deducible at compile-time from the types that go into a function, then Julia can optimize it as much as C++ or Fortran by automatically devirtualizing all dynamic behavior and compile-time optimizing the quasi-static code. This is not an empirical statement, it's a provable type-theoretic result. The resulting compiler used on the resulting quasi-static representation is LLVM, the same optimizing compiler used by clang and LFortran.

For more details on how Julia code is optimized and how to optimize your own Julia code, check out this chapter from the SciML Book.

SciML's Julia Algorithms Have Performance Advantages in Many Common Regimes

There are many ways which Julia's algorithms achieve performance advantages. Some facts to highlight include:

Let's Dig Deep Into One Case: Adjoints of ODEs for Solving Inverse Problems

To really highlight how JIT compilation and automatic differentiation integration can change algorithms, let's look at the problem of differentiating an ODE solver. As is derived and discussed in detail at a seminar with the American Statistical Association, there are many ways to implement well-known “adjoint” methods which are required for performance. Each has different stability and performance trade-offs, and Julia's SciML is the only system to systemically offer all of the trade-off options. In many cases, using analytical adjoints of a solver is not advised due to performance reasons, with the trade-off described in detail here. Likewise, even when analytical adjoints are used, it turns out that for general nonlinear equations there is a trick which uses automatic differentiation in the construction of the analytical adjoint to improve its performance. As demonstrated in this publication, this can lead to about 2-3 orders of magnitude performance improvements. These AD-enhanced adjoints are showcased as the seeding methods in this plot:

Unless one directly defines special “vjp” functions, this is how the Julia SciML methods achieve orders of magnitude performance advantages over CVODES's adjoints and PETSC's TS-adjoint.

Moral of the story: even though there are many reasons to use automatic differentiation of a solver, and even if an analytical adjoint rule is used for some specific performance reason, that analytical expression can oftentimes be accelerated by orders of magnitude itself by embedding some form of automatic differentiation into it. This is just one algorithm of many which are optimized in this fashion.

+Getting Started with Julia's SciML for the C++/Fortran User · Overview of Julia's SciML

Getting Started with Julia's SciML for the C++/Fortran User

You don't need help if you're a Fortran guru. I'm just kidding, you're not a Lisp developer. If you're coming from C++ or Fortran, you may be familiar with high-performance computing environments similar to SciML, such as PETSc, Trilinos, or Sundials. The following are some points to help the transition.

Why SciML? High-Level Workflow Reasons

If you're coming from “hardcore” C++/Fortran computing environments, some things to check out with Julia's SciML are:

  • Interactivity - use the interactive REPL to easily investigate numerical details.
  • Metaprogramming performance tools - tools like LoopVectorization.jl can be used to generate faster code than even some of the most hand-optimized C++/Fortran code. Current benchmarks show this SIMD-optimized Julia code outperforming OpenBLAS and MKL BLAS implementations in many performance regimes.
  • Symbolic modeling languages - writing models by hand can leave a lot of performance on the table. Using high-level modeling tools like ModelingToolkit can automate symbolic simplifications, which improve the stability and performance of numerical solvers. On complex models, even the best handwritten C++/Fortran code is orders of magnitude behind the code that symbolic tearing algorithms can achieve!
  • Composable Library Components - In C++/Fortran environments, every package feels like a silo. Arrays made for PETSc cannot easily be used in Trilinos, and converting Sundials NVector outputs to DataFrames for post-simulation data processing is a process itself. The Julia SciML environment embraces interoperability. Don't wait for SciML to do it: by using generic coding with JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • Wrappers to the Libraries You Know and Trust - Moving to SciML does not have to be a quick transition. SciML has extensive wrappers to many widely-used classical solver environments such as SUNDIALS and Hairer's classic Fortran ODE solvers (dopri5, dop853, etc.). Using these wrapped solvers is painless and can be swapped in for the Julia versions with one line of code. This gives you a way to incrementally adopt new features/methods while retaining the older pieces you know and trust.
  • Don't Start from Scratch - SciML builds on the extensive Base library of Julia, and thus grows and improves with every update to the language. With hundreds of monthly contributors to SciML and hundreds of monthly contributors to Julia, SciML is one of the most actively developed open-source scientific computing ecosystems out there!
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, Sundials/Hairer in purple/red represent C++/Fortran's most commonly used solvers:

Why SciML? Some Technical Details

Let's face the facts, in the open benchmarks the pure-Julia solvers tend to outperform the classic “best” C++ and Fortran solvers in almost every example (with a few notable exceptions). But why?

The answer is two-fold: Julia is as fast as C++/Fortran, and the algorithms are what matter.

Julia is as Fast as C++/Fortran

While Julia code looks high level like Python or MATLAB, its performance is on par with C++ and Fortran. At a technical level, when Julia code is type-stable, i.e. that the types that are returned from a function are deducible at compile-time from the types that go into a function, then Julia can optimize it as much as C++ or Fortran by automatically devirtualizing all dynamic behavior and compile-time optimizing the quasi-static code. This is not an empirical statement, it's a provable type-theoretic result. The resulting compiler used on the resulting quasi-static representation is LLVM, the same optimizing compiler used by clang and LFortran.

For more details on how Julia code is optimized and how to optimize your own Julia code, check out this chapter from the SciML Book.

SciML's Julia Algorithms Have Performance Advantages in Many Common Regimes

There are many ways which Julia's algorithms achieve performance advantages. Some facts to highlight include:

Let's Dig Deep Into One Case: Adjoints of ODEs for Solving Inverse Problems

To really highlight how JIT compilation and automatic differentiation integration can change algorithms, let's look at the problem of differentiating an ODE solver. As is derived and discussed in detail at a seminar with the American Statistical Association, there are many ways to implement well-known “adjoint” methods which are required for performance. Each has different stability and performance trade-offs, and Julia's SciML is the only system to systemically offer all of the trade-off options. In many cases, using analytical adjoints of a solver is not advised due to performance reasons, with the trade-off described in detail here. Likewise, even when analytical adjoints are used, it turns out that for general nonlinear equations there is a trick which uses automatic differentiation in the construction of the analytical adjoint to improve its performance. As demonstrated in this publication, this can lead to about 2-3 orders of magnitude performance improvements. These AD-enhanced adjoints are showcased as the seeding methods in this plot:

Unless one directly defines special “vjp” functions, this is how the Julia SciML methods achieve orders of magnitude performance advantages over CVODES's adjoints and PETSC's TS-adjoint.

Moral of the story: even though there are many reasons to use automatic differentiation of a solver, and even if an analytical adjoint rule is used for some specific performance reason, that analytical expression can oftentimes be accelerated by orders of magnitude itself by embedding some form of automatic differentiation into it. This is just one algorithm of many which are optimized in this fashion.

diff --git a/dev/comparisons/matlab/index.html b/dev/comparisons/matlab/index.html index 5bb6d26b266..a20e9fc1421 100644 --- a/dev/comparisons/matlab/index.html +++ b/dev/comparisons/matlab/index.html @@ -1,2 +1,2 @@ -Getting Started with Julia's SciML for the MATLAB User · Overview of Julia's SciML

Getting Started with Julia's SciML for the MATLAB User

If you're a MATLAB user who has looked into Julia for some performance improvements, you may have noticed that the standard library does not have all of the “batteries” included with a base MATLAB installation. Where's the ODE solver? Where's fmincon and fsolve? Those scientific computing functionalities are the pieces provided by the Julia SciML ecosystem!

Why SciML? High-Level Workflow Reasons

  • Performance - The key reason people are moving from MATLAB to Julia's SciML in droves is performance. Even simple ODE solvers are much faster!, demonstrating orders of magnitude performance improvements for differential equations, nonlinear solving, optimization, and more. And the performance advantages continue to grow as more complex algorithms are required.
  • Julia is quick to learn from MATLAB - Most ODE codes can be translated in a few minutes. If you need help, check out the QuantEcon MATLAB-Python-Julia Cheat Sheet.
  • Package Management and Versioning - Julia's package manager takes care of dependency management, testing, and continuous delivery in order to make the installation and maintenance process smoother. For package users, this means it's easier to get packages with complex functionality in your hands.
  • Free and Open Source - If you want to know how things are being computed, just look at our GitHub organization. Lots of individuals use Julia's SciML to research how the algorithms actually work because of how accessible and tweakable the ecosystem is!
  • Composable Library Components - In MATLAB environments, every package feels like a silo. Functions made for one file exchange library cannot easily compose with another. With SciML's generic coding with JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, MATLAB in orange represents MATLAB's most commonly used solvers:

Need a case study?

Check out this talk from NASA Scientists getting a 15,000x acceleration by switching from Simulink to Julia's ModelingToolkit!

Need Help Translating from MATLAB to Julia?

The following resources can be particularly helpful when adopting Julia for SciML for the first time:

MATLAB to Julia SciML Functionality Translations

The following chart will help you get quickly acquainted with Julia's SciML Tools:

MATLAB FunctionSciML-Supported Julia packages
plotPlots, Makie
sparseSparseArrays
interp1DataInterpolations
\, gmres, cgLinearSolve
fsolveNonlinearSolve
quadIntegrals
fminconOptimization
odeXXDifferentialEquations
ode45Tsit5
ode113VCABM
ode23sRosenbrock23
ode15sQNDF or FBDF
ode15iIDA
bvp4c and bvp5cDifferentialEquations
Simulink, SimscapeModelingToolkit
fftFFTW
chebfunApproxFun
+Getting Started with Julia's SciML for the MATLAB User · Overview of Julia's SciML

Getting Started with Julia's SciML for the MATLAB User

If you're a MATLAB user who has looked into Julia for some performance improvements, you may have noticed that the standard library does not have all of the “batteries” included with a base MATLAB installation. Where's the ODE solver? Where's fmincon and fsolve? Those scientific computing functionalities are the pieces provided by the Julia SciML ecosystem!

Why SciML? High-Level Workflow Reasons

  • Performance - The key reason people are moving from MATLAB to Julia's SciML in droves is performance. Even simple ODE solvers are much faster!, demonstrating orders of magnitude performance improvements for differential equations, nonlinear solving, optimization, and more. And the performance advantages continue to grow as more complex algorithms are required.
  • Julia is quick to learn from MATLAB - Most ODE codes can be translated in a few minutes. If you need help, check out the QuantEcon MATLAB-Python-Julia Cheat Sheet.
  • Package Management and Versioning - Julia's package manager takes care of dependency management, testing, and continuous delivery in order to make the installation and maintenance process smoother. For package users, this means it's easier to get packages with complex functionality in your hands.
  • Free and Open Source - If you want to know how things are being computed, just look at our GitHub organization. Lots of individuals use Julia's SciML to research how the algorithms actually work because of how accessible and tweakable the ecosystem is!
  • Composable Library Components - In MATLAB environments, every package feels like a silo. Functions made for one file exchange library cannot easily compose with another. With SciML's generic coding with JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, MATLAB in orange represents MATLAB's most commonly used solvers:

Need a case study?

Check out this talk from NASA Scientists getting a 15,000x acceleration by switching from Simulink to Julia's ModelingToolkit!

Need Help Translating from MATLAB to Julia?

The following resources can be particularly helpful when adopting Julia for SciML for the first time:

MATLAB to Julia SciML Functionality Translations

The following chart will help you get quickly acquainted with Julia's SciML Tools:

MATLAB FunctionSciML-Supported Julia packages
plotPlots, Makie
sparseSparseArrays
interp1DataInterpolations
\, gmres, cgLinearSolve
fsolveNonlinearSolve
quadIntegrals
fminconOptimization
odeXXDifferentialEquations
ode45Tsit5
ode113VCABM
ode23sRosenbrock23
ode15sQNDF or FBDF
ode15iIDA
bvp4c and bvp5cDifferentialEquations
Simulink, SimscapeModelingToolkit
fftFFTW
chebfunApproxFun
diff --git a/dev/comparisons/python/index.html b/dev/comparisons/python/index.html index 1324e465416..42d826c3324 100644 --- a/dev/comparisons/python/index.html +++ b/dev/comparisons/python/index.html @@ -1,2 +1,2 @@ -Getting Started with Julia's SciML for the Python User · Overview of Julia's SciML

Getting Started with Julia's SciML for the Python User

If you're a Python user who has looked into Julia, you're probably wondering what the equivalent to SciPy is. And you found it: it's the SciML ecosystem! To a Python developer, SciML is SciPy, but with the high-performance GPU capabilities of PyTorch and neural network capabilities, all baked right in. With SciML, there is no “separate world” of machine learning sublanguages: there is just one cohesive package ecosystem.

Why SciML? High-Level Workflow Reasons

  • Performance - The key reason people are moving from SciPy to Julia's SciML in droves is performance. Even simple ODE solvers are much faster!, demonstrating orders of magnitude performance improvements for differential equations, nonlinear solving, optimization, and more. And the performance advantages continue to grow as more complex algorithms are required.
  • Package Management and Versioning - Julia's package manager takes care of dependency management, testing, and continuous delivery in order to make the installation and maintenance process smoother. For package users, this means it's easier to get packages with complex functionality in your hands.
  • Composable Library Components - In Python environments, every package feels like a silo. Functions made for one file exchange library cannot easily compose with another. With SciML's generic coding with JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, SciPy in yellow represents Python's most commonly used solvers:

Need Help Translating from Python to Julia?

The following resources can be particularly helpful when adopting Julia for SciML for the first time:

Python to Julia SciML Functionality Translations

The following chart will help you get quickly acquainted with Julia's SciML Tools:

Workflow ElementSciML-Supported Julia packages
MatplotlibPlots, Makie
scipy.specialSpecialFunctions
scipy.linalg.solveLinearSolve
scipy.integrateIntegrals
scipy.optimizeOptimization
scipy.optimize.fsolveNonlinearSolve
scipy.interpolateDataInterpolations
scipy.fftFFTW
scipy.linalgJulia's Built-In Linear Algebra
scipy.sparseSparseArrays, ARPACK
odeint/solve_ivpDifferentialEquations
scipy.integrate.solve_bvpBoundary-value problem
PyTorchFlux, Lux
gillespy2Catalyst, JumpProcesses
scipy.optimize.approx_fprimeFiniteDiff
autogradForwardDiff*, Enzyme*, DiffEqSensitivity
StanTuring
sympySymbolics

Why is Differentiable Programming Important for Scientific Computing?

Check out this blog post that goes into detail on how training neural networks in tandem with simulation improves performance by orders of magnitude. But can't you use analytical adjoint definitions? You can, but there are tricks to mix automatic differentiation into the adjoint definitions for a few orders of magnitude improvement too, as explained in this blog post.

These facts, along with many others, compose to algorithmic improvements with the implementation improvements, which leads to orders of magnitude improvements!

+Getting Started with Julia's SciML for the Python User · Overview of Julia's SciML

Getting Started with Julia's SciML for the Python User

If you're a Python user who has looked into Julia, you're probably wondering what the equivalent to SciPy is. And you found it: it's the SciML ecosystem! To a Python developer, SciML is SciPy, but with the high-performance GPU capabilities of PyTorch and neural network capabilities, all baked right in. With SciML, there is no “separate world” of machine learning sublanguages: there is just one cohesive package ecosystem.

Why SciML? High-Level Workflow Reasons

  • Performance - The key reason people are moving from SciPy to Julia's SciML in droves is performance. Even simple ODE solvers are much faster!, demonstrating orders of magnitude performance improvements for differential equations, nonlinear solving, optimization, and more. And the performance advantages continue to grow as more complex algorithms are required.
  • Package Management and Versioning - Julia's package manager takes care of dependency management, testing, and continuous delivery in order to make the installation and maintenance process smoother. For package users, this means it's easier to get packages with complex functionality in your hands.
  • Composable Library Components - In Python environments, every package feels like a silo. Functions made for one file exchange library cannot easily compose with another. With SciML's generic coding with JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, SciPy in yellow represents Python's most commonly used solvers:

Need Help Translating from Python to Julia?

The following resources can be particularly helpful when adopting Julia for SciML for the first time:

Python to Julia SciML Functionality Translations

The following chart will help you get quickly acquainted with Julia's SciML Tools:

Workflow ElementSciML-Supported Julia packages
MatplotlibPlots, Makie
scipy.specialSpecialFunctions
scipy.linalg.solveLinearSolve
scipy.integrateIntegrals
scipy.optimizeOptimization
scipy.optimize.fsolveNonlinearSolve
scipy.interpolateDataInterpolations
scipy.fftFFTW
scipy.linalgJulia's Built-In Linear Algebra
scipy.sparseSparseArrays, ARPACK
odeint/solve_ivpDifferentialEquations
scipy.integrate.solve_bvpBoundary-value problem
PyTorchFlux, Lux
gillespy2Catalyst, JumpProcesses
scipy.optimize.approx_fprimeFiniteDiff
autogradForwardDiff*, Enzyme*, DiffEqSensitivity
StanTuring
sympySymbolics

Why is Differentiable Programming Important for Scientific Computing?

Check out this blog post that goes into detail on how training neural networks in tandem with simulation improves performance by orders of magnitude. But can't you use analytical adjoint definitions? You can, but there are tricks to mix automatic differentiation into the adjoint definitions for a few orders of magnitude improvement too, as explained in this blog post.

These facts, along with many others, compose to algorithmic improvements with the implementation improvements, which leads to orders of magnitude improvements!

diff --git a/dev/comparisons/r/index.html b/dev/comparisons/r/index.html index 36cf6c3128f..494ab5e7233 100644 --- a/dev/comparisons/r/index.html +++ b/dev/comparisons/r/index.html @@ -1,2 +1,2 @@ -Getting Started with Julia's SciML for the R User · Overview of Julia's SciML

Getting Started with Julia's SciML for the R User

If you're an R user who has looked into Julia, you're probably wondering where all of the scientific computing packages are. How do I solve ODEs? Solve f(x)=0 for x? Etc. SciML is the ecosystem for doing this with Julia.

Why SciML? High-Level Workflow Reasons

  • Performance - The key reason people are moving from R to Julia's SciML in droves is performance. Even simple ODE solvers are much faster, demonstrating orders of magnitude performance improvements for differential equations, nonlinear solving, optimization, and more. And the performance advantages continue to grow as more complex algorithms are required.
  • Composable Library Components - In R environments, every package feels like a silo. Functions made for one file exchange library cannot easily compose with another. With SciML's generic coding and JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • A Global Harmonious Documentation for Scientific Computing - R's documentation for scientific computing is scattered in a bunch of individual packages where the developers do not talk to each other! This not only leads to documentation differences, but also “style” differences: one package uses tol while the other uses atol. With Julia's SciML, the whole ecosystem is considered together, and inconsistencies are handled at the global level. The goal is to be working in one environment with one language.
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, deSolve in blue represents R's most commonly used solver:

Need Help Translating from R to Julia?

The following resources can be particularly helpful when adopting Julia for SciML for the first time:

R to Julia SciML Functionality Translations

The following chart will help you get quickly acquainted with Julia's SciML Tools:

R Function/PackageSciML-Supported Julia packages
data.frameDataFrames
plotPlots, Makie
ggplot2AlgebraOfGraphics
deSolveDifferentialEquations
StanTuring

Want to See the Power of Julia?

Check out this R-Bloggers blog post on diffeqr, a package which uses ModelingToolkit to translate R code to Julia, and achieves 350x acceleration over R's popular deSolve ODE solver package. But when the solve is done purely in Julia, it achieves 2777x acceleration over deSolve!

+Getting Started with Julia's SciML for the R User · Overview of Julia's SciML

Getting Started with Julia's SciML for the R User

If you're an R user who has looked into Julia, you're probably wondering where all of the scientific computing packages are. How do I solve ODEs? Solve f(x)=0 for x? Etc. SciML is the ecosystem for doing this with Julia.

Why SciML? High-Level Workflow Reasons

  • Performance - The key reason people are moving from R to Julia's SciML in droves is performance. Even simple ODE solvers are much faster, demonstrating orders of magnitude performance improvements for differential equations, nonlinear solving, optimization, and more. And the performance advantages continue to grow as more complex algorithms are required.
  • Composable Library Components - In R environments, every package feels like a silo. Functions made for one file exchange library cannot easily compose with another. With SciML's generic coding and JIT compilation, these connections create new optimized code on the fly and allow for a more expansive feature set than can ever be documented. Take new high-precision number types from a package and stick them into a nonlinear solver. Take a package for Intel GPU arrays and stick it into the differential equation solver to use specialized hardware acceleration.
  • A Global Harmonious Documentation for Scientific Computing - R's documentation for scientific computing is scattered in a bunch of individual packages where the developers do not talk to each other! This not only leads to documentation differences, but also “style” differences: one package uses tol while the other uses atol. With Julia's SciML, the whole ecosystem is considered together, and inconsistencies are handled at the global level. The goal is to be working in one environment with one language.
  • Easier High-Performance and Parallel Computing - With Julia's ecosystem, CUDA will automatically install all of the required binaries and cu(A)*cu(B) is then all that's required to GPU-accelerate large-scale linear algebra. MPI is easy to install and use. Distributed computing through password-less SSH. Multithreading is automatic and baked into many libraries, with a specialized algorithm to ensure hierarchical usage does not oversubscribe threads. Basically, libraries give you a lot of parallelism for free, and doing the rest is a piece of cake.
  • Mix Scientific Computing with Machine Learning - Want to automate the discovery of missing physical laws using neural networks embedded in differentiable simulations? Julia's SciML is the ecosystem with the tooling to integrate machine learning into the traditional high-performance scientific computing domains, from multiphysics simulations to partial differential equations.

In this plot, deSolve in blue represents R's most commonly used solver:

Need Help Translating from R to Julia?

The following resources can be particularly helpful when adopting Julia for SciML for the first time:

R to Julia SciML Functionality Translations

The following chart will help you get quickly acquainted with Julia's SciML Tools:

R Function/PackageSciML-Supported Julia packages
data.frameDataFrames
plotPlots, Makie
ggplot2AlgebraOfGraphics
deSolveDifferentialEquations
StanTuring

Want to See the Power of Julia?

Check out this R-Bloggers blog post on diffeqr, a package which uses ModelingToolkit to translate R code to Julia, and achieves 350x acceleration over R's popular deSolve ODE solver package. But when the solve is done purely in Julia, it achieves 2777x acceleration over deSolve!

diff --git a/dev/getting_started/find_root/index.html b/dev/getting_started/find_root/index.html index a38c926902d..e7629ff6231 100644 --- a/dev/getting_started/find_root/index.html +++ b/dev/getting_started/find_root/index.html @@ -69,4 +69,4 @@ 0.0 0.0 0.0

Step 5: Analyze the Solution

Now let's check out the solution. First of all, what kind of thing is the sol? We can see that by asking for its type:

typeof(sol)
SciMLBase.NonlinearSolution{Float64, 1, Vector{Float64}, Vector{Float64}, SciMLBase.NonlinearProblem{Vector{Float64}, true, Vector{Float64}, SciMLBase.NonlinearFunction{true, SciMLBase.FullSpecialize, ModelingToolkit.var"#f#733"{RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:ˍ₋arg1, :ˍ₋arg2), ModelingToolkit.var"#_RGF_ModTag", ModelingToolkit.var"#_RGF_ModTag", (0x892348f8, 0xa22abdbe, 0xa285f9d8, 0x95831967, 0xeaff7cf7), Nothing}, RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:ˍ₋out, :ˍ₋arg1, :ˍ₋arg2), ModelingToolkit.var"#_RGF_ModTag", ModelingToolkit.var"#_RGF_ModTag", (0x2a5931a9, 0x19d9b662, 0x2773c77d, 0x8f1eafb9, 0xd485c91b), Nothing}}, LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Vector{Symbol}, Vector{Symbol}, ModelingToolkit.var"#generated_observed#736"{ModelingToolkit.NonlinearSystem, Dict{Any, Any}}, Nothing, ModelingToolkit.NonlinearSystem, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardNonlinearProblem}, NonlinearSolve.NewtonRaphson{0, true, Val{:forward}, Nothing, typeof(NonlinearSolve.DEFAULT_PRECS), true, nothing}, Nothing, Nothing, SciMLBase.NLStats}

From this, we can see that it is an NonlinearSolution. We can see the documentation for how to use the NonlinearSolution by checking the NonlinearSolve.jl solution type page. For example, the solution is stored as .u. What is the solution to our nonlinear system, and what is the final residual value? We can check it as follows:

# Analyze the solution
-@show sol.u, sol.resid
([0.0, 0.0, 0.0], [0.0, 0.0, 0.0])
+@show sol.u, sol.resid
([0.0, 0.0, 0.0], [0.0, 0.0, 0.0])
diff --git a/dev/getting_started/first_optimization/index.html b/dev/getting_started/first_optimization/index.html index 4607ee45333..2dcefc7a1a6 100644 --- a/dev/getting_started/first_optimization/index.html +++ b/dev/getting_started/first_optimization/index.html @@ -28,4 +28,4 @@ sol = solve(prob, NLopt.LD_LBFGS())
u: 2-element Vector{Float64}:
  1.0
  1.0

Step 4: Analyze the Solution

Now let's check out the solution. First of all, what kind of thing is the sol? We can see that by asking for its type:

typeof(sol)
SciMLBase.OptimizationSolution{Float64, 1, Vector{Float64}, Optimization.OptimizationCache{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), OptimizationForwardDiffExt.var"#38#56"{ForwardDiff.GradientConfig{ForwardDiff.Tag{OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, Float64}, Float64, 2}}}, OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}}, OptimizationForwardDiffExt.var"#41#59"{ForwardDiff.HessianConfig{ForwardDiff.Tag{OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, Float64}, ForwardDiff.Dual{ForwardDiff.Tag{OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, Float64}, Float64, 2}, 2}}, Vector{ForwardDiff.Dual{ForwardDiff.Tag{OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, Float64}, Float64, 2}}}, OptimizationForwardDiffExt.var"#37#55"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, 
Vector{Float64}}}}, OptimizationForwardDiffExt.var"#44#62", Nothing, OptimizationForwardDiffExt.var"#48#66"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, OptimizationForwardDiffExt.var"#53#71"{SciMLBase.OptimizationFunction{true, ADTypes.AutoForwardDiff{nothing, Nothing}, typeof(Main.L), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Optimization.ReInitCache{Vector{Float64}, Vector{Float64}}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, NLopt.Algorithm, Base.Iterators.Cycle{Tuple{Optimization.NullData}}, Bool, Optimization.var"#41#43"}, NLopt.Algorithm, Float64, NLopt.Opt, Float64, Nothing}

From this, we can see that it is an OptimizationSolution. We can see the documentation for how to use the OptimizationSolution by checking the Optimization.jl solution type page. For example, the solution is stored as .u. What is the solution to our optimization, and what is the final loss value? We can check it as follows:

# Analyze the solution
-@show sol.u, L(sol.u, p)
([1.0, 1.0], 0.0)
+@show sol.u, L(sol.u, p)
([1.0, 1.0], 0.0)
diff --git a/dev/getting_started/first_simulation/index.html b/dev/getting_started/first_simulation/index.html index be4f8ccb111..81210b4ff01 100644 --- a/dev/getting_started/first_simulation/index.html +++ b/dev/getting_started/first_simulation/index.html @@ -205,4 +205,4 @@ [1.816420140681761, 4.064056625315978] [1.1465021407690728, 2.7911706616216976] [0.9557986135403302, 1.6235622951850799] - [1.0337581256020607, 0.9063703842886133]

Now go make your professor mad that they have to grade a fully emojified code. I'll vouch for you: the documentation told you to do this.

+ [1.0337581256020607, 0.9063703842886133]

Now go make your professor mad that they have to grade a fully emojified code. I'll vouch for you: the documentation told you to do this.

diff --git a/dev/getting_started/fit_simulation/index.html b/dev/getting_started/fit_simulation/index.html index cc00196b4de..f93f0154c5e 100644 --- a/dev/getting_started/fit_simulation/index.html +++ b/dev/getting_started/fit_simulation/index.html @@ -136,4 +136,4 @@ 1.5002841181763384 1.0009286254415546 3.0002381483082114 - 1.0004604305929508

and the answer from the optimization is our desired parameters.

+ 1.0004604305929508

and the answer from the optimization is our desired parameters.

diff --git a/dev/getting_started/getting_started/index.html b/dev/getting_started/getting_started/index.html index 9cc3a8386e4..955786960b3 100644 --- a/dev/getting_started/getting_started/index.html +++ b/dev/getting_started/getting_started/index.html @@ -1,2 +1,2 @@ -Getting Started with Julia's SciML · Overview of Julia's SciML

Getting Started with Julia's SciML

Quickly: What is Julia's SciML Ecosystem?

Julia's SciML is:

  • SciPy or MATLAB's standard library but in Julia, but
  • Runs orders of magnitude faster, even outperforms C and Fortran libraries, and
  • Is fully compatible with machine learning and automatic differentiation,
  • All while having an easy-to-use high level interactive development environment.

Interested?

Introductory Tutorials

Note

Each of the SciML packages starts with its own introductory tutorial as well! Once you have started to get the hang of a few things, start checking out the introductory tutorials of the different packages. For example, the DifferentialEquations.jl getting started tutorial is a fun one!

Coming from...

Are you familiar with other scientific computing tools? Take a look at the guided introductions below.

+Getting Started with Julia's SciML · Overview of Julia's SciML

Getting Started with Julia's SciML

Quickly: What is Julia's SciML Ecosystem?

Julia's SciML is:

  • SciPy or MATLAB's standard library but in Julia, but
  • Runs orders of magnitude faster, even outperforms C and Fortran libraries, and
  • Is fully compatible with machine learning and automatic differentiation,
  • All while having an easy-to-use high level interactive development environment.

Interested?

Introductory Tutorials

Note

Each of the SciML packages starts with its own introductory tutorial as well! Once you have started to get the hang of a few things, start checking out the introductory tutorials of the different packages. For example, the DifferentialEquations.jl getting started tutorial is a fun one!

Coming from...

Are you familiar with other scientific computing tools? Take a look at the guided introductions below.

diff --git a/dev/getting_started/installation/index.html b/dev/getting_started/installation/index.html index 68a88e6a373..71dd6f9e89c 100644 --- a/dev/getting_started/installation/index.html +++ b/dev/getting_started/installation/index.html @@ -1,3 +1,3 @@ Installing SciML Software · Overview of Julia's SciML

Installing SciML Software

Step 1: Install Julia

Download Julia using this website.

Note

Some Linux distributions do weird and incorrect things with Julia installations! Please install Julia using the binaries provided by the official JuliaLang website!

To ensure that you have installed Julia correctly, open it up and type versioninfo() in the REPL. It should look like the following:

(with the CPU/OS/etc. details matching your computer!)

If you got stuck in this installation process, ask for help on the Julia Discourse or in the Julia Zulip chatrooms

Optional Step 1.5: Get VS Code Setup with the Julia Extension

You can run SciML with Julia in any development environment you please, but our recommended environment is VS Code. For more information on using Julia with VS Code, check out the Julia VS Code Extension website. Let's install it!

First download VS Code from the official website.

Next, open Visual Studio Code and click Extensions.

Then, search for “Julia” in the search bar on the top of the extension tab, click on the “Julia” extension, and click the install button on the tab that opens up.

To make sure your installation is correct, try running some code. Open a new file by either going to the top left navigation bar File |> New Text File, or hitting Ctrl+n. Name your new file test.jl (important: the Julia VS Code functionality only turns on when using a .jl file!). Next, type 1+1 and hit Ctrl+Enter. A Julia REPL should pop up and the result 2 should be displayed. Your environment should look something like this:

For more help on using the VS Code editor with Julia, check out the VS Code in Julia documentation. Useful keyboard commands can be found here.

Once again, if you got stuck in this installation process, ask for help on the Julia Discourse or in the Julia Zulip chatrooms

Step 2: Install a SciML Package

SciML is over 130 Julia packages. That's too much stuff to give someone in a single download! Thus instead, the SciML organization divides its functionality into composable modules that can be mixed and matched as required. Installing SciML ecosystem functionality is equivalent to installation of such packages.

For example, do you need the differential equation solver? Then install DifferentialEquations via the command:

using Pkg;
-Pkg.add("DifferentialEquations");

in the Julia REPL. Or, for a more robust REPL experience, hit the ] command to make the blue pkg> REPL environment start, and type in add DifferentialEquations. The package REPL environment will have nice extras like auto-complete that will be useful in the future. This command should run an installation sequence and precompile all of the packages (precompile = "run a bunch of performance optimizations!"). Don't be surprised if this installation process takes ~10 minutes on older computers. During the installation, it should look like this:

And that's it!

How do I test that my package installed correctly?

The best way is to build and run your first simulation!

+Pkg.add("DifferentialEquations");

in the Julia REPL. Or, for a more robust REPL experience, hit the ] command to make the blue pkg> REPL environment start, and type in add DifferentialEquations. The package REPL environment will have nice extras like auto-complete that will be useful in the future. This command should run an installation sequence and precompile all of the packages (precompile = "run a bunch of performance optimizations!"). Don't be surprised if this installation process takes ~10 minutes on older computers. During the installation, it should look like this:

And that's it!

How do I test that my package installed correctly?

The best way is to build and run your first simulation!

diff --git a/dev/highlevels/array_libraries/index.html b/dev/highlevels/array_libraries/index.html index c1b38b79fb8..c80dde7db5a 100644 --- a/dev/highlevels/array_libraries/index.html +++ b/dev/highlevels/array_libraries/index.html @@ -22,4 +22,4 @@ lorenz_p = (σ = 10.0, ρ = 28.0, β = 8 / 3) lorenz_ic = ComponentArray(x = 0.0, y = 0.0, z = 0.0) -lorenz_prob = ODEProblem(lorenz!, lorenz_ic, tspan, lorenz_p)

Is that beautiful? Yes, it is.

StaticArrays.jl: Statically-Defined Arrays

StaticArrays.jl is a library for statically-defined arrays. Because these arrays have type-level information for size, they recompile the solvers for every new size. They can be dramatically faster for small sizes (up to approximately size 10), but for larger equations they increase compile time with little to no benefit.

CUDA.jl: NVIDIA CUDA-Based GPU Array Computations

CUDA.jl is the library for defining arrays which live on NVIDIA GPUs (CuArray). SciML's libraries will respect the GPU-ness of the inputs, i.e., if the input arrays live on the GPU then the operations will all take place on the GPU or else the libraries will error if it's unable to do so. Thus, using CUDA.jl's CuArray is how one GPU-accelerates any computation with the SciML organization's libraries. Simply use a CuArray as the initial condition to an ODE solve or as the initial guess for a nonlinear solve, and the whole solve will recompile to take place on the GPU.

AMDGPU.jl: AMD-Based GPU Array Computations

AMDGPU.jl is the library for defining arrays which live on AMD GPUs (ROCArray). SciML's libraries will respect the GPU-ness of the inputs, i.e., if the input arrays live on the GPU then the operations will all take place on the GPU or else the libraries will error if it's unable to do so. Thus using AMDGPU.jl's ROCArray is how one GPU-accelerates any computation with the SciML organization's libraries. Simply use a ROCArray as the initial condition to an ODE solve or as the initial guess for a nonlinear solve, and the whole solve will recompile to take place on the GPU.

FillArrays.jl: Lazy Arrays

FillArrays.jl is a library for defining arrays with lazy values. For example, an O(1) representation of the identity matrix is given by Eye{Int}(5). FillArrays.jl is used extensively throughout the ecosystem to improve runtime and memory performance.

BandedMatrices.jl: Fast Banded Matrices

Banded matrices show up in many equation solver contexts, such as the Jacobians of many partial differential equations. While the base SparseMatrixCSC sparse matrix type can represent such matrices, BandedMatrices.jl is a specialized format specifically for BandedMatrices which can be used to greatly improve performance of operations on a banded matrix.

BlockBandedMatrices.jl: Fast Block-Banded Matrices

Block banded matrices show up in many equation solver contexts, such as the Jacobians of many systems of partial differential equations. While the base SparseMatrixCSC sparse matrix type can represent such matrices, BlockBandedMatrices.jl is a specialized format specifically for BlockBandedMatrices which can be used to greatly improve performance of operations on a block-banded matrix.

+lorenz_prob = ODEProblem(lorenz!, lorenz_ic, tspan, lorenz_p)

Is that beautiful? Yes, it is.

StaticArrays.jl: Statically-Defined Arrays

StaticArrays.jl is a library for statically-defined arrays. Because these arrays have type-level information for size, they recompile the solvers for every new size. They can be dramatically faster for small sizes (up to approximately size 10), but for larger equations they increase compile time with little to no benefit.

CUDA.jl: NVIDIA CUDA-Based GPU Array Computations

CUDA.jl is the library for defining arrays which live on NVIDIA GPUs (CuArray). SciML's libraries will respect the GPU-ness of the inputs, i.e., if the input arrays live on the GPU then the operations will all take place on the GPU or else the libraries will error if it's unable to do so. Thus, using CUDA.jl's CuArray is how one GPU-accelerates any computation with the SciML organization's libraries. Simply use a CuArray as the initial condition to an ODE solve or as the initial guess for a nonlinear solve, and the whole solve will recompile to take place on the GPU.

AMDGPU.jl: AMD-Based GPU Array Computations

AMDGPU.jl is the library for defining arrays which live on AMD GPUs (ROCArray). SciML's libraries will respect the GPU-ness of the inputs, i.e., if the input arrays live on the GPU then the operations will all take place on the GPU or else the libraries will error if it's unable to do so. Thus using AMDGPU.jl's ROCArray is how one GPU-accelerates any computation with the SciML organization's libraries. Simply use a ROCArray as the initial condition to an ODE solve or as the initial guess for a nonlinear solve, and the whole solve will recompile to take place on the GPU.

FillArrays.jl: Lazy Arrays

FillArrays.jl is a library for defining arrays with lazy values. For example, an O(1) representation of the identity matrix is given by Eye{Int}(5). FillArrays.jl is used extensively throughout the ecosystem to improve runtime and memory performance.

BandedMatrices.jl: Fast Banded Matrices

Banded matrices show up in many equation solver contexts, such as the Jacobians of many partial differential equations. While the base SparseMatrixCSC sparse matrix type can represent such matrices, BandedMatrices.jl is a specialized format specifically for BandedMatrices which can be used to greatly improve performance of operations on a banded matrix.

BlockBandedMatrices.jl: Fast Block-Banded Matrices

Block banded matrices show up in many equation solver contexts, such as the Jacobians of many systems of partial differential equations. While the base SparseMatrixCSC sparse matrix type can represent such matrices, BlockBandedMatrices.jl is a specialized format specifically for BlockBandedMatrices which can be used to greatly improve performance of operations on a block-banded matrix.

diff --git a/dev/highlevels/developer_documentation/index.html b/dev/highlevels/developer_documentation/index.html index 6e79b12fe46..1f6278f7340 100644 --- a/dev/highlevels/developer_documentation/index.html +++ b/dev/highlevels/developer_documentation/index.html @@ -1,3 +1,3 @@ Developer Documentation · Overview of Julia's SciML

Developer Documentation

For uniformity and clarity, the SciML Open-Source Software Organization has many well-defined rules and practices for its development. However, we stress one important principle:

Do not be deterred from contributing if you think you do not know everything. No one knows everything. These rules and styles are designed for iterative contributions. Open pull requests and contribute what you can with what you know, and the maintainers will help you learn and do the rest!

If you need any help contributing, please feel welcome joining our community channels.

We welcome everybody.

Getting Started With Contributing to SciML

To get started contributing to SciML, check out the following resources:

SciMLStyle: The SciML Style Guide for Julia

SciML Code Style

This is a style guide for how to program in Julia for SciML contributions. It describes everything one needs to know, from preferred naming schemes of functions to fundamental dogmas for designing traits. We stress that this style guide is meant to be comprehensive for the sake of designing automatic formatters and teaching desired rules, but complete knowledge and adherence to the style guide is not required for contributions!

COLPRAC: Contributor's Guide on Collaborative Practices for Community Packages

ColPrac: Contributor's Guide on Collaborative Practices for Community Packages

What are the rules for when PRs should be merged? What are the rules for whether to tag a major, minor, or patch release? All of these development rules are defined in COLPRAC.

DiffEq Developer Documentation

There are many solver libraries which share similar internals, such as OrdinaryDiffEq.jl, StochasticDiffEq.jl, and DelayDiffEq.jl. This section of the documentation describes the internal systems of these packages and how they are used to quickly write efficient solvers.

Third-Party Libraries to Note

Documenter.jl

Documenter.jl is the documentation generation library that the SciML organization uses, and thus its documentation is the documentation of the documentation.

JuliaFormatter.jl

JuliaFormatter.jl is the formatter used by the SciML organization to enforce the SciML Style. Setting style = "sciml" in a .JuliaFormatter.toml file of a repo and using the standard FormatCheck.yml as part of continuous integration makes JuliaFormatter check for SciML Style compliance on pull requests.

To run JuliaFormatter in a SciML repository, do:

using JuliaFormatter, DevedPackage
-JuliaFormatter.format(pkgdir(DevedPackage))

which will reformat the code according to the SciML Style.

GitHub Actions Continuous Integrations

The SciML Organization uses continuous integration testing to always ensure tests are passing when merging pull requests. The organization uses the GitHub Actions supplied by Julia Actions to accomplish this. Common continuous integration scripts are:

  • CI.yml, the standard CI script
  • Downstream.yml, used to specify packages for downstream testing. This will make packages which depend on the current package also be tested to ensure that “non-breaking changes” do not actually break other packages.
  • Documentation.yml, used to run the documentation automatic generation with Documenter.jl
  • FormatCheck.yml, used to check JuliaFormatter SciML Style compliance

CompatHelper

CompatHelper is used to automatically create pull requests whenever a dependent package is upper bounded. The results of CompatHelper PRs should be checked to ensure that the latest version of the dependencies are grabbed for the test process. After successful CompatHelper PRs, i.e. if the increase of the upper bound did not cause a break to the tests, a new version tag should follow. It is set up by adding the CompatHelper.yml GitHub action.

TagBot

TagBot automatically creates tags in the GitHub repository whenever a package is registered to the Julia General repository. It is set up by adding the TagBot.yml GitHub action.

+JuliaFormatter.format(pkgdir(DevedPackage))

which will reformat the code according to the SciML Style.

GitHub Actions Continuous Integrations

The SciML Organization uses continuous integration testing to always ensure tests are passing when merging pull requests. The organization uses the GitHub Actions supplied by Julia Actions to accomplish this. Common continuous integration scripts are:

CompatHelper

CompatHelper is used to automatically create pull requests whenever a dependent package is upper bounded. The results of CompatHelper PRs should be checked to ensure that the latest version of the dependencies are grabbed for the test process. After successful CompatHelper PRs, i.e. if the increase of the upper bound did not cause a break to the tests, a new version tag should follow. It is set up by adding the CompatHelper.yml GitHub action.

TagBot

TagBot automatically creates tags in the GitHub repository whenever a package is registered to the Julia General repository. It is set up by adding the TagBot.yml GitHub action.

diff --git a/dev/highlevels/equation_solvers/index.html b/dev/highlevels/equation_solvers/index.html index f9102bc6f1c..6dc2d3e9de6 100644 --- a/dev/highlevels/equation_solvers/index.html +++ b/dev/highlevels/equation_solvers/index.html @@ -1,2 +1,2 @@ -Equation Solvers · Overview of Julia's SciML

Equation Solvers

The SciML Equation Solvers cover a large set of SciMLProblems with SciMLAlgorithms that are efficient, numerically stable, and flexible. These methods tie into libraries like SciMLSensitivity.jl to be fully differentiable and compatible with machine learning pipelines, and are designed for integration with applications like parameter estimation, global sensitivity analysis, and more.

LinearSolve.jl: Unified Interface for Linear Solvers

LinearSolve.jl is the canonical library for solving LinearProblems. It includes:

  • Fast pure Julia LU factorizations which outperform standard BLAS
  • KLU for faster sparse LU factorization on unstructured matrices
  • UMFPACK for faster sparse LU factorization on matrices with some repeated structure
  • MKLPardiso wrappers for handling many sparse matrices faster than SuiteSparse (KLU, UMFPACK) methods
  • GPU-offloading for large dense matrices
  • Wrappers to all of the Krylov implementations (Krylov.jl, IterativeSolvers.jl, KrylovKit.jl) for easy testing of all of them. LinearSolve.jl handles the API differences, especially with the preconditioner definitions
  • A polyalgorithm that smartly chooses between these methods
  • A caching interface which automates caching of symbolic factorizations and numerical factorizations as optimally as possible
  • Compatible with arbitrary AbstractArray and Number types, such as GPU-based arrays, uncertainty quantification number types, and more.

NonlinearSolve.jl: Unified Interface for Nonlinear Solvers

NonlinearSolve.jl is the canonical library for solving NonlinearProblems. It includes:

  • Fast non-allocating implementations on static arrays of common methods (Newton-Raphson)
  • Bracketing methods (Bisection, Falsi) for methods with known upper and lower bounds (IntervalNonlinearProblem)
  • Wrappers to common other solvers (NLsolve.jl, MINPACK, KINSOL from Sundials) for trust region methods, line search-based approaches, etc.
  • Built over the LinearSolve.jl API for maximum flexibility and performance in the solving approach
  • Compatible with arbitrary AbstractArray and Number types, such as GPU-based arrays, uncertainty quantification number types, and more.

DifferentialEquations.jl: Unified Interface for Differential Equation Solvers

DifferentialEquations.jl is the canonical library for solving DEProblems. This includes:

  • Discrete equations (function maps, discrete stochastic (Gillespie/Markov) simulations) (DiscreteProblem)
  • Ordinary differential equations (ODEs) (ODEProblem)
  • Split and Partitioned ODEs (Symplectic integrators, IMEX Methods) (SplitODEProblem)
  • Stochastic ordinary differential equations (SODEs or SDEs) (SDEProblem)
  • Stochastic differential-algebraic equations (SDAEs) (SDEProblem with mass matrices)
  • Random differential equations (RODEs or RDEs) (RODEProblem)
  • Differential algebraic equations (DAEs) (DAEProblem and ODEProblem with mass matrices)
  • Delay differential equations (DDEs) (DDEProblem)
  • Neutral, retarded, and algebraic delay differential equations (NDDEs, RDDEs, and DDAEs)
  • Stochastic delay differential equations (SDDEs) (SDDEProblem)
  • Experimental support for stochastic neutral, retarded, and algebraic delay differential equations (SNDDEs, SRDDEs, and SDDAEs)
  • Mixed discrete and continuous equations (Hybrid Equations, Jump Diffusions) (DEProblems with callbacks and JumpProblem)

The well-optimized DifferentialEquations solvers benchmark as some of the fastest implementations of classic algorithms. It also includes algorithms from recent research which routinely outperform the “standard” C/Fortran methods, and algorithms optimized for high-precision and HPC applications. Simultaneously, it wraps the classic C/Fortran methods, making it easy to switch over to them whenever necessary. Solving differential equations with different methods from different languages and packages can be done by changing one line of code, allowing for easy benchmarking to ensure you are using the fastest method possible.

DifferentialEquations.jl integrates with the Julia package sphere. Examples are:

  • GPU acceleration through CUDAnative.jl and CuArrays.jl
  • Automated sparsity detection with Symbolics.jl
  • Automatic Jacobian coloring with SparseDiffTools.jl, allowing for fast solutions to problems with sparse or structured (Tridiagonal, Banded, BlockBanded, etc.) Jacobians
  • Allowing the specification of linear solvers for maximal efficiency
  • Progress meter integration with the Juno IDE for estimated time to solution
  • Automatic plotting of time series and phase plots
  • Built-in interpolations
  • Wraps for common C/Fortran methods, like Sundials and Hairer's radau
  • Arbitrary precision with BigFloats and Arbfloats
  • Arbitrary array types, allowing the definition of differential equations on matrices and distributed arrays
  • Unit-checked arithmetic with Unitful

Optimization.jl: Unified Interface for Optimization

Optimization.jl is the canonical library for solving OptimizationProblems. It includes wrappers of most of the Julia nonlinear optimization ecosystem, allowing one syntax to use all packages in a uniform manner. This covers:

Integrals.jl: Unified Interface for Numerical Integration

Integrals.jl is the canonical library for solving IntegralsProblems. It includes wrappers of most of the Julia quadrature ecosystem, allowing one syntax to use all packages in a uniform manner. This covers:

  • Gauss-Kronrod quadrature
  • Cubature methods (both h and p cubature)
  • Adaptive Monte Carlo methods

JumpProcesses.jl: Stochastic Simulation Algorithms for Jump Processes, Jump-ODEs, and Jump-Diffusions

JumpProcesses.jl is the library for Poisson jump processes, also known as chemical master equations or Gillespie simulations, for simulating chemical reaction networks and other applications. It allows for solving with many methods, including:

  • Direct: the Gillespie Direct method SSA.
  • RDirect: A variant of Gillespie's Direct method that uses rejection to sample the next reaction.
  • DirectCR: The Composition-Rejection Direct method of Slepoy et al. For large networks and linear chain-type networks, it will often give better performance than Direct. (Requires dependency graph, see below.)
  • DirectFW: the Gillespie Direct method SSA with FunctionWrappers. This aggregator uses a different internal storage format for collections of ConstantRateJumps.
  • FRM: the Gillespie first reaction method SSA. Direct should generally offer better performance and be preferred to FRM.
  • FRMFW: the Gillespie first reaction method SSA with FunctionWrappers.
  • NRM: The Gibson-Bruck Next Reaction Method. For some reaction network structures, this may offer better performance than Direct (for example, large, linear chains of reactions). (Requires dependency graph, see below.)
  • RSSA: The Rejection SSA (RSSA) method of Thanh et al. With RSSACR, for very large reaction networks, it often offers the best performance of all methods. (Requires dependency graph, see below.)
  • RSSACR: The Rejection SSA (RSSA) with Composition-Rejection method of Thanh et al. With RSSA, for very large reaction networks, it often offers the best performance of all methods. (Requires dependency graph, see below.)
  • SortingDirect: The Sorting Direct Method of McCollum et al. It will usually offer performance as good as Direct, and for some systems can offer substantially better performance. (Requires dependency graph, see below.)

The design of JumpProcesses.jl composes with DifferentialEquations.jl, allowing for discrete stochastic chemical reactions to be easily mixed with differential equation models, allowing for simulation of hybrid systems, jump diffusions, and differential equations driven by Lévy processes.

In addition, JumpProcesses's interfaces allow for solving with regular jump methods, such as adaptive Tau-Leaping.

Third-Party Libraries to Note

JuMP.jl: Julia for Mathematical Programming

While Optimization.jl is the preferred library for nonlinear optimization, for all other forms of optimization Julia for Mathematical Programming (JuMP) is the star. JuMP is the leading choice in Julia for doing:

  • Linear Programming
  • Quadratic Programming
  • Convex Programming
  • Conic Programming
  • Semidefinite Programming
  • Mixed-Complementarity Programming
  • Integer Programming
  • Mixed Integer (nonlinear/linear) Programming
  • (Mixed Integer) Second Order Conic Programming

JuMP can also be used for some nonlinear programming, though the Optimization.jl bindings to the JuMP solvers (via MathOptInterface.jl) are generally preferred.

FractionalDiffEq.jl: Fractional Differential Equation Solvers

FractionalDiffEq.jl is a set of high-performance solvers for fractional differential equations.

ManifoldDiffEq.jl: Solvers for Differential Equations on Manifolds

ManifoldDiffEq.jl is a set of high-performance solvers for differential equations on manifolds using methods such as Lie Group actions and frozen coefficients (Crouch-Grossman methods). These solvers can in many cases out-perform the OrdinaryDiffEq.jl nonautonomous operator ODE solvers by using methods specialized on manifold definitions of ManifoldsBase.

Manopt.jl: Optimization on Manifolds

Manopt.jl allows for easy and efficient solving of nonlinear optimization problems on manifolds.

+Equation Solvers · Overview of Julia's SciML

Equation Solvers

The SciML Equation Solvers cover a large set of SciMLProblems with SciMLAlgorithms that are efficient, numerically stable, and flexible. These methods tie into libraries like SciMLSensitivity.jl to be fully differentiable and compatible with machine learning pipelines, and are designed for integration with applications like parameter estimation, global sensitivity analysis, and more.

LinearSolve.jl: Unified Interface for Linear Solvers

LinearSolve.jl is the canonical library for solving LinearProblems. It includes:

  • Fast pure Julia LU factorizations which outperform standard BLAS
  • KLU for faster sparse LU factorization on unstructured matrices
  • UMFPACK for faster sparse LU factorization on matrices with some repeated structure
  • MKLPardiso wrappers for handling many sparse matrices faster than SuiteSparse (KLU, UMFPACK) methods
  • GPU-offloading for large dense matrices
  • Wrappers to all of the Krylov implementations (Krylov.jl, IterativeSolvers.jl, KrylovKit.jl) for easy testing of all of them. LinearSolve.jl handles the API differences, especially with the preconditioner definitions
  • A polyalgorithm that smartly chooses between these methods
  • A caching interface which automates caching of symbolic factorizations and numerical factorizations as optimally as possible
  • Compatible with arbitrary AbstractArray and Number types, such as GPU-based arrays, uncertainty quantification number types, and more.

NonlinearSolve.jl: Unified Interface for Nonlinear Solvers

NonlinearSolve.jl is the canonical library for solving NonlinearProblems. It includes:

  • Fast non-allocating implementations on static arrays of common methods (Newton-Raphson)
  • Bracketing methods (Bisection, Falsi) for methods with known upper and lower bounds (IntervalNonlinearProblem)
  • Wrappers to common other solvers (NLsolve.jl, MINPACK, KINSOL from Sundials) for trust region methods, line search-based approaches, etc.
  • Built over the LinearSolve.jl API for maximum flexibility and performance in the solving approach
  • Compatible with arbitrary AbstractArray and Number types, such as GPU-based arrays, uncertainty quantification number types, and more.

DifferentialEquations.jl: Unified Interface for Differential Equation Solvers

DifferentialEquations.jl is the canonical library for solving DEProblems. This includes:

  • Discrete equations (function maps, discrete stochastic (Gillespie/Markov) simulations) (DiscreteProblem)
  • Ordinary differential equations (ODEs) (ODEProblem)
  • Split and Partitioned ODEs (Symplectic integrators, IMEX Methods) (SplitODEProblem)
  • Stochastic ordinary differential equations (SODEs or SDEs) (SDEProblem)
  • Stochastic differential-algebraic equations (SDAEs) (SDEProblem with mass matrices)
  • Random differential equations (RODEs or RDEs) (RODEProblem)
  • Differential algebraic equations (DAEs) (DAEProblem and ODEProblem with mass matrices)
  • Delay differential equations (DDEs) (DDEProblem)
  • Neutral, retarded, and algebraic delay differential equations (NDDEs, RDDEs, and DDAEs)
  • Stochastic delay differential equations (SDDEs) (SDDEProblem)
  • Experimental support for stochastic neutral, retarded, and algebraic delay differential equations (SNDDEs, SRDDEs, and SDDAEs)
  • Mixed discrete and continuous equations (Hybrid Equations, Jump Diffusions) (DEProblems with callbacks and JumpProblem)

The well-optimized DifferentialEquations solvers benchmark as some of the fastest implementations of classic algorithms. It also includes algorithms from recent research which routinely outperform the “standard” C/Fortran methods, and algorithms optimized for high-precision and HPC applications. Simultaneously, it wraps the classic C/Fortran methods, making it easy to switch over to them whenever necessary. Solving differential equations with different methods from different languages and packages can be done by changing one line of code, allowing for easy benchmarking to ensure you are using the fastest method possible.

DifferentialEquations.jl integrates with the Julia package sphere. Examples are:

  • GPU acceleration through CUDAnative.jl and CuArrays.jl
  • Automated sparsity detection with Symbolics.jl
  • Automatic Jacobian coloring with SparseDiffTools.jl, allowing for fast solutions to problems with sparse or structured (Tridiagonal, Banded, BlockBanded, etc.) Jacobians
  • Allowing the specification of linear solvers for maximal efficiency
  • Progress meter integration with the Juno IDE for estimated time to solution
  • Automatic plotting of time series and phase plots
  • Built-in interpolations
  • Wraps for common C/Fortran methods, like Sundials and Hairer's radau
  • Arbitrary precision with BigFloats and Arbfloats
  • Arbitrary array types, allowing the definition of differential equations on matrices and distributed arrays
  • Unit-checked arithmetic with Unitful

Optimization.jl: Unified Interface for Optimization

Optimization.jl is the canonical library for solving OptimizationProblems. It includes wrappers of most of the Julia nonlinear optimization ecosystem, allowing one syntax to use all packages in a uniform manner. This covers:

Integrals.jl: Unified Interface for Numerical Integration

Integrals.jl is the canonical library for solving IntegralsProblems. It includes wrappers of most of the Julia quadrature ecosystem, allowing one syntax to use all packages in a uniform manner. This covers:

  • Gauss-Kronrod quadrature
  • Cubature methods (both h and p cubature)
  • Adaptive Monte Carlo methods

JumpProcesses.jl: Stochastic Simulation Algorithms for Jump Processes, Jump-ODEs, and Jump-Diffusions

JumpProcesses.jl is the library for Poisson jump processes, also known as chemical master equations or Gillespie simulations, for simulating chemical reaction networks and other applications. It allows for solving with many methods, including:

  • Direct: the Gillespie Direct method SSA.
  • RDirect: A variant of Gillespie's Direct method that uses rejection to sample the next reaction.
  • DirectCR: The Composition-Rejection Direct method of Slepoy et al. For large networks and linear chain-type networks, it will often give better performance than Direct. (Requires dependency graph, see below.)
  • DirectFW: the Gillespie Direct method SSA with FunctionWrappers. This aggregator uses a different internal storage format for collections of ConstantRateJumps.
  • FRM: the Gillespie first reaction method SSA. Direct should generally offer better performance and be preferred to FRM.
  • FRMFW: the Gillespie first reaction method SSA with FunctionWrappers.
  • NRM: The Gibson-Bruck Next Reaction Method. For some reaction network structures, this may offer better performance than Direct (for example, large, linear chains of reactions). (Requires dependency graph, see below.)
  • RSSA: The Rejection SSA (RSSA) method of Thanh et al. With RSSACR, for very large reaction networks, it often offers the best performance of all methods. (Requires dependency graph, see below.)
  • RSSACR: The Rejection SSA (RSSA) with Composition-Rejection method of Thanh et al. With RSSA, for very large reaction networks, it often offers the best performance of all methods. (Requires dependency graph, see below.)
  • SortingDirect: The Sorting Direct Method of McCollum et al. It will usually offer performance as good as Direct, and for some systems can offer substantially better performance. (Requires dependency graph, see below.)

The design of JumpProcesses.jl composes with DifferentialEquations.jl, allowing for discrete stochastic chemical reactions to be easily mixed with differential equation models, allowing for simulation of hybrid systems, jump diffusions, and differential equations driven by Lévy processes.

In addition, JumpProcesses's interfaces allow for solving with regular jump methods, such as adaptive Tau-Leaping.

Third-Party Libraries to Note

JuMP.jl: Julia for Mathematical Programming

While Optimization.jl is the preferred library for nonlinear optimization, for all other forms of optimization Julia for Mathematical Programming (JuMP) is the star. JuMP is the leading choice in Julia for doing:

  • Linear Programming
  • Quadratic Programming
  • Convex Programming
  • Conic Programming
  • Semidefinite Programming
  • Mixed-Complementarity Programming
  • Integer Programming
  • Mixed Integer (nonlinear/linear) Programming
  • (Mixed Integer) Second Order Conic Programming

JuMP can also be used for some nonlinear programming, though the Optimization.jl bindings to the JuMP solvers (via MathOptInterface.jl) are generally preferred.

FractionalDiffEq.jl: Fractional Differential Equation Solvers

FractionalDiffEq.jl is a set of high-performance solvers for fractional differential equations.

ManifoldDiffEq.jl: Solvers for Differential Equations on Manifolds

ManifoldDiffEq.jl is a set of high-performance solvers for differential equations on manifolds using methods such as Lie Group actions and frozen coefficients (Crouch-Grossman methods). These solvers can in many cases out-perform the OrdinaryDiffEq.jl nonautonomous operator ODE solvers by using methods specialized on manifold definitions of ManifoldsBase.

Manopt.jl: Optimization on Manifolds

Manopt.jl allows for easy and efficient solving of nonlinear optimization problems on manifolds.

diff --git a/dev/highlevels/function_approximation/index.html b/dev/highlevels/function_approximation/index.html index ba61fbfd101..3595b7dfe3e 100644 --- a/dev/highlevels/function_approximation/index.html +++ b/dev/highlevels/function_approximation/index.html @@ -1,2 +1,2 @@ -Function Approximation · Overview of Julia's SciML

Function Approximation

While SciML is not an ecosystem for machine learning, SciML has many libraries for doing machine learning with its equation solver libraries and machine learning libraries which are integrated into the equation solvers.

Surrogates.jl: Easy Generation of Differentiable Surrogate Models

Surrogates.jl is a library for generating surrogate approximations to computationally expensive simulations. It has the following high-dimensional function approximators:

  • Kriging
  • Kriging using Stheno
  • Radial Basis
  • Wendland
  • Linear
  • Second Order Polynomial
  • Support Vector Machines (Wait for LIBSVM resolution)
  • Neural Networks
  • Random Forests
  • Lobachevsky splines
  • Inverse-distance
  • Polynomial expansions
  • Variable fidelity
  • Mixture of experts (Waiting GaussianMixtures package to work on v1.5)
  • Earth
  • Gradient Enhanced Kriging

ReservoirComputing.jl: Fast and Flexible Reservoir Computing Methods

ReservoirComputing.jl is a library for doing machine learning using reservoir computing techniques, such as with methods like Echo State Networks (ESNs). Its reservoir computing methods make it stabilized for usage with difficult equations like stiff dynamics, chaotic equations, and more.

Third-Party Libraries to Note

Flux.jl: the ML library that doesn't make you tensor

Flux.jl is the most popular machine learning library in the Julia programming language. SciML's libraries are heavily tested with it and its automatic differentiation engine Zygote.jl for composability and compatibility.

Lux.jl: Explicitly Parameterized Neural Networks in Julia

Lux.jl is a library for fully explicitly parameterized neural networks. Thus, while alternative interfaces are required to use Flux with many equation solvers (i.e. Flux.destructure), Lux.jl's explicit design marries effortlessly with the SciML equation solver libraries. For this reason, SciML's libraries are also heavily tested with Lux to ensure compatibility with neural network definitions from here.

SimpleChains.jl: Fast Small-Scale Machine Learning

SimpleChains.jl is a library specialized for small-scale machine learning. It uses non-allocating mutating forms to be highly efficient for the cases where matrix multiplication kernels cannot overcome the common overheads of machine learning libraries. Thus for SciML cases with small neural networks (<100 node layers) and non-batched usage (many/most use cases), SimpleChains.jl can be the fastest choice for the neural network definitions.

NNLib.jl: Neural Network Primitives with Multiple Backends

NNLib.jl is the core library which defines the handling of common functions, like conv and how they map to device accelerators such as the NVIDIA cudnn. This library can thus be used to directly grab many of the core functions used in machine learning, such as common activation functions and gather/scatter operations, without depending on the given style of any machine learning library.

GeometricFlux.jl: Geometric Deep Learning and Graph Neural Networks

GeometricFlux.jl is a library for graph neural networks and geometric deep learning. It is the one that is used and tested by the SciML developers for mixing with equation solver applications.

AbstractGPs.jl: Fast and Flexible Gaussian Processes

AbstractGPs.jl is the fast and flexible Gaussian Process library that is used by the SciML packages and recommended for downstream usage.

MLDatasets.jl: Common Machine Learning Datasets

MLDatasets.jl is a common interface for accessing common machine learning datasets. For example, if you want to run a test on MNIST data, MLDatasets is the quickest way to obtain it.

MLUtils.jl: Utility Functions for Machine Learning Pipelines

MLUtils.jl is a library of utility functions for making writing common machine learning pipelines easier. This includes functionality for:

  • An extensible dataset interface (numobs and getobs).
  • Data iteration and data loaders (eachobs and DataLoader).
  • Lazy data views (obsview).
  • Resampling procedures (undersample and oversample).
  • Train/test splits (splitobs)
  • Data partitioning and aggregation tools (batch, unbatch, chunk, group_counts, group_indices).
  • Folds for cross-validation (kfolds, leavepout).
  • Datasets lazy transformations (mapobs, filterobs, groupobs, joinobs, shuffleobs).
  • Toy datasets for demonstration purpose.
  • Other data handling utilities (flatten, normalise, unsqueeze, stack, unstack).
+Function Approximation · Overview of Julia's SciML

Function Approximation

While SciML is not an ecosystem for machine learning, SciML has many libraries for doing machine learning with its equation solver libraries and machine learning libraries which are integrated into the equation solvers.

Surrogates.jl: Easy Generation of Differentiable Surrogate Models

Surrogates.jl is a library for generating surrogate approximations to computationally expensive simulations. It has the following high-dimensional function approximators:

  • Kriging
  • Kriging using Stheno
  • Radial Basis
  • Wendland
  • Linear
  • Second Order Polynomial
  • Support Vector Machines (Wait for LIBSVM resolution)
  • Neural Networks
  • Random Forests
  • Lobachevsky splines
  • Inverse-distance
  • Polynomial expansions
  • Variable fidelity
  • Mixture of experts (Waiting GaussianMixtures package to work on v1.5)
  • Earth
  • Gradient Enhanced Kriging

ReservoirComputing.jl: Fast and Flexible Reservoir Computing Methods

ReservoirComputing.jl is a library for doing machine learning using reservoir computing techniques, such as with methods like Echo State Networks (ESNs). Its reservoir computing methods make it stabilized for usage with difficult equations like stiff dynamics, chaotic equations, and more.

Third-Party Libraries to Note

Flux.jl: the ML library that doesn't make you tensor

Flux.jl is the most popular machine learning library in the Julia programming language. SciML's libraries are heavily tested with it and its automatic differentiation engine Zygote.jl for composability and compatibility.

Lux.jl: Explicitly Parameterized Neural Networks in Julia

Lux.jl is a library for fully explicitly parameterized neural networks. Thus, while alternative interfaces are required to use Flux with many equation solvers (i.e. Flux.destructure), Lux.jl's explicit design marries effortlessly with the SciML equation solver libraries. For this reason, SciML's libraries are also heavily tested with Lux to ensure compatibility with neural network definitions from here.

SimpleChains.jl: Fast Small-Scale Machine Learning

SimpleChains.jl is a library specialized for small-scale machine learning. It uses non-allocating mutating forms to be highly efficient for the cases where matrix multiplication kernels cannot overcome the common overheads of machine learning libraries. Thus for SciML cases with small neural networks (<100 node layers) and non-batched usage (many/most use cases), SimpleChains.jl can be the fastest choice for the neural network definitions.

NNLib.jl: Neural Network Primitives with Multiple Backends

NNLib.jl is the core library which defines the handling of common functions, like conv and how they map to device accelerators such as the NVIDIA cudnn. This library can thus be used to directly grab many of the core functions used in machine learning, such as common activation functions and gather/scatter operations, without depending on the given style of any machine learning library.

GeometricFlux.jl: Geometric Deep Learning and Graph Neural Networks

GeometricFlux.jl is a library for graph neural networks and geometric deep learning. It is the one that is used and tested by the SciML developers for mixing with equation solver applications.

AbstractGPs.jl: Fast and Flexible Gaussian Processes

AbstractGPs.jl is the fast and flexible Gaussian Process library that is used by the SciML packages and recommended for downstream usage.

MLDatasets.jl: Common Machine Learning Datasets

MLDatasets.jl is a common interface for accessing common machine learning datasets. For example, if you want to run a test on MNIST data, MLDatasets is the quickest way to obtain it.

MLUtils.jl: Utility Functions for Machine Learning Pipelines

MLUtils.jl is a library of utility functions for making writing common machine learning pipelines easier. This includes functionality for:

  • An extensible dataset interface (numobs and getobs).
  • Data iteration and data loaders (eachobs and DataLoader).
  • Lazy data views (obsview).
  • Resampling procedures (undersample and oversample).
  • Train/test splits (splitobs)
  • Data partitioning and aggregation tools (batch, unbatch, chunk, group_counts, group_indices).
  • Folds for cross-validation (kfolds, leavepout).
  • Datasets lazy transformations (mapobs, filterobs, groupobs, joinobs, shuffleobs).
  • Toy datasets for demonstration purpose.
  • Other data handling utilities (flatten, normalise, unsqueeze, stack, unstack).
diff --git a/dev/highlevels/implicit_layers/index.html b/dev/highlevels/implicit_layers/index.html index 28cd86e10a2..abb4214c148 100644 --- a/dev/highlevels/implicit_layers/index.html +++ b/dev/highlevels/implicit_layers/index.html @@ -1,2 +1,2 @@ -Implicit Layer Deep Learning · Overview of Julia's SciML

Implicit Layer Deep Learning

Implicit layer deep learning is a field which uses implicit rules, such as differential equations and nonlinear solvers, to define the layers of neural networks. This field has brought the potential to automatically optimize network depth and improve training performance. SciML's differentiable solver ecosystem is specifically designed to accommodate implicit layer methodologies, and provides libraries with pre-built layers for common methods.

DiffEqFlux.jl: High Level Pre-Built Architectures for Implicit Deep Learning

DiffEqFlux.jl is a library of pre-built architectures for implicit deep learning, including layer definitions for methods like:

DeepEquilibriumNetworks.jl: Deep Equilibrium Models Made Fast

DeepEquilibriumNetworks.jl is a library of optimized layer implementations for Deep Equilibrium Models (DEQs). It uses special training techniques such as implicit-explicit regularization in order to accelerate the convergence over traditional implementations, all while using the optimized and flexible SciML libraries under the hood.

+Implicit Layer Deep Learning · Overview of Julia's SciML

Implicit Layer Deep Learning

Implicit layer deep learning is a field which uses implicit rules, such as differential equations and nonlinear solvers, to define the layers of neural networks. This field has brought the potential to automatically optimize network depth and improve training performance. SciML's differentiable solver ecosystem is specifically designed to accommodate implicit layer methodologies, and provides libraries with pre-built layers for common methods.

DiffEqFlux.jl: High Level Pre-Built Architectures for Implicit Deep Learning

DiffEqFlux.jl is a library of pre-built architectures for implicit deep learning, including layer definitions for methods like:

DeepEquilibriumNetworks.jl: Deep Equilibrium Models Made Fast

DeepEquilibriumNetworks.jl is a library of optimized layer implementations for Deep Equilibrium Models (DEQs). It uses special training techniques such as implicit-explicit regularization in order to accelerate the convergence over traditional implementations, all while using the optimized and flexible SciML libraries under the hood.

diff --git a/dev/highlevels/interfaces/index.html b/dev/highlevels/interfaces/index.html index c4ce3f86760..fecae70d0f5 100644 --- a/dev/highlevels/interfaces/index.html +++ b/dev/highlevels/interfaces/index.html @@ -1,2 +1,2 @@ -The SciML Interface Libraries · Overview of Julia's SciML

The SciML Interface Libraries

SciMLBase.jl: The SciML Common Interface

SciMLBase.jl defines the core interfaces of the SciML libraries, such as the definitions of abstract types like SciMLProblem, along with their instantiations like ODEProblem. While SciMLBase.jl is insufficient to solve any equations, it holds all the equation definitions, and thus downstream libraries which wish to allow for using SciML solvers without depending on any solvers can directly depend on SciMLBase.jl.

SciMLOperators.jl: The AbstractSciMLOperator Interface

SciMLOperators.jl defines the interface for how matrix-free linear and affine operators are defined and used throughout the SciML ecosystem.

DiffEqNoiseProcess.jl: The SciML Common Noise Interface

DiffEqNoiseProcess.jl defines the common interface for stochastic noise processes used by the equation solvers of the SciML ecosystem.

CommonSolve.jl: The Common Definition of Solve

CommonSolve.jl is the library that defines the solve, solve!, and init interfaces which are used throughout all the SciML equation solvers. It's defined as an extremely lightweight library so that other ecosystems can build on the same solve definition without clashing with SciML when both export.

Static.jl: A Shared Interface for Static Compile-Time Computation

Static.jl is a set of statically parameterized types for performing operations in a statically-defined (compiler-optimized) way with respect to values.

DiffEqBase.jl: A Library of Shared Components for Differential Equation Solvers

DiffEqBase.jl is the core shared component of the DifferentialEquations.jl ecosystem. It's not intended for non-developer users to interface directly with, instead it's used for the common functionality for uniformity of implementation between the solver libraries.

Third-Party Libraries to Note

ArrayInterface.jl: Extensions to the Julia AbstractArray Interface

ArrayInterface.jl is a collection of traits and functions which extend the Julia Base AbstractArray interface, giving a much larger set of queries to allow for writing high-performance generic code over all array types. For example, functions include can_change_size to know if an AbstractArray type is compatible with resize!, fast_scalar_indexing to know whether direct scalar indexing A[i] is optimized, and functions like findstructralnz to get the structural non-zeros of arbitrary sparse and structured matrices.

Adapt.jl: Conversion to Allow Chip-Generic Programs

Adapt.jl makes it possible to write code that is generic to the compute devices, i.e. code that works on both CPUs and GPUs. It defines the adapt function which acts like convert(T, x), but without the restriction of returning a T. This allows you to “convert” wrapper types, like Adjoint to be GPU compatible (for example) without throwing away the wrapper.

Example usage:

adapt(CuArray, ::Adjoint{Array})::Adjoint{CuArray}

AbstractFFTs.jl: High Level Shared Interface for Fast Fourier Transformation Libraries

AbstractFFTs.jl defines the common interface for Fast Fourier Transformations (FFTs) in Julia. Similar to SciMLBase.jl, AbstractFFTs.jl is not a solver library but instead a shared API which is extended by solver libraries such as FFTW.jl. Code written using AbstractFFTs.jl can be made compatible with FFT libraries without having an explicit dependency on a solver.

GPUArrays.jl: Common Interface for GPU-Based Array Types

GPUArrays.jl defines the shared higher-level operations for GPU-based array types, like CUDA.jl's CuArray and AMDGPU.jl's ROCArray. Packages in SciML use the designation x isa AbstractGPUArray in order to find out if a user's operation is on the GPU and specialize computations.

RecipesBase.jl: Standard Plotting Recipe Interface

RecipesBase.jl defines the common interface for plotting recipes, composable transformations of Julia data types into simpler data types for visualization with libraries such as Plots.jl and Makie.jl. SciML libraries attempt to always include plot recipes wherever possible for ease of visualization.

Tables.jl: Common Interface for Tabular Data Types

Tables.jl is a common interface for defining tabular data structures, such as DataFrames.jl. SciML's libraries extend the Tables.jl interface to allow for automated conversions into data frame libraries without explicit dependence on any singular implementation.

+The SciML Interface Libraries · Overview of Julia's SciML

The SciML Interface Libraries

SciMLBase.jl: The SciML Common Interface

SciMLBase.jl defines the core interfaces of the SciML libraries, such as the definitions of abstract types like SciMLProblem, along with their instantiations like ODEProblem. While SciMLBase.jl is insufficient to solve any equations, it holds all the equation definitions, and thus downstream libraries which wish to allow for using SciML solvers without depending on any solvers can directly depend on SciMLBase.jl.

SciMLOperators.jl: The AbstractSciMLOperator Interface

SciMLOperators.jl defines the interface for how matrix-free linear and affine operators are defined and used throughout the SciML ecosystem.

DiffEqNoiseProcess.jl: The SciML Common Noise Interface

DiffEqNoiseProcess.jl defines the common interface for stochastic noise processes used by the equation solvers of the SciML ecosystem.

CommonSolve.jl: The Common Definition of Solve

CommonSolve.jl is the library that defines the solve, solve!, and init interfaces which are used throughout all the SciML equation solvers. It's defined as an extremely lightweight library so that other ecosystems can build on the same solve definition without clashing with SciML when both export.

Static.jl: A Shared Interface for Static Compile-Time Computation

Static.jl is a set of statically parameterized types for performing operations in a statically-defined (compiler-optimized) way with respect to values.

DiffEqBase.jl: A Library of Shared Components for Differential Equation Solvers

DiffEqBase.jl is the core shared component of the DifferentialEquations.jl ecosystem. It's not intended for non-developer users to interface directly with, instead it's used for the common functionality for uniformity of implementation between the solver libraries.

Third-Party Libraries to Note

ArrayInterface.jl: Extensions to the Julia AbstractArray Interface

ArrayInterface.jl is a collection of traits and functions which extend the Julia Base AbstractArray interface, giving a much larger set of queries to allow for writing high-performance generic code over all array types. For example, functions include can_change_size to know if an AbstractArray type is compatible with resize!, fast_scalar_indexing to know whether direct scalar indexing A[i] is optimized, and functions like findstructralnz to get the structural non-zeros of arbitrary sparse and structured matrices.

Adapt.jl: Conversion to Allow Chip-Generic Programs

Adapt.jl makes it possible to write code that is generic to the compute devices, i.e. code that works on both CPUs and GPUs. It defines the adapt function which acts like convert(T, x), but without the restriction of returning a T. This allows you to “convert” wrapper types, like Adjoint to be GPU compatible (for example) without throwing away the wrapper.

Example usage:

adapt(CuArray, ::Adjoint{Array})::Adjoint{CuArray}

AbstractFFTs.jl: High Level Shared Interface for Fast Fourier Transformation Libraries

AbstractFFTs.jl defines the common interface for Fast Fourier Transformations (FFTs) in Julia. Similar to SciMLBase.jl, AbstractFFTs.jl is not a solver library but instead a shared API which is extended by solver libraries such as FFTW.jl. Code written using AbstractFFTs.jl can be made compatible with FFT libraries without having an explicit dependency on a solver.

GPUArrays.jl: Common Interface for GPU-Based Array Types

GPUArrays.jl defines the shared higher-level operations for GPU-based array types, like CUDA.jl's CuArray and AMDGPU.jl's ROCArray. Packages in SciML use the designation x isa AbstractGPUArray in order to find out if a user's operation is on the GPU and specialize computations.

RecipesBase.jl: Standard Plotting Recipe Interface

RecipesBase.jl defines the common interface for plotting recipes, composable transformations of Julia data types into simpler data types for visualization with libraries such as Plots.jl and Makie.jl. SciML libraries attempt to always include plot recipes wherever possible for ease of visualization.

Tables.jl: Common Interface for Tabular Data Types

Tables.jl is a common interface for defining tabular data structures, such as DataFrames.jl. SciML's libraries extend the Tables.jl interface to allow for automated conversions into data frame libraries without explicit dependence on any singular implementation.

diff --git a/dev/highlevels/inverse_problems/index.html b/dev/highlevels/inverse_problems/index.html index 4cc978d96e6..c3c24eb5785 100644 --- a/dev/highlevels/inverse_problems/index.html +++ b/dev/highlevels/inverse_problems/index.html @@ -1,2 +1,2 @@ -Parameter Estimation, Bayesian Analysis, and Inverse Problems · Overview of Julia's SciML

Parameter Estimation, Bayesian Analysis, and Inverse Problems

Parameter estimation for models and equations, also known as dynamic data analysis, solving the inverse problem, or Bayesian posterior estimation (when done probabilistically), is provided by the SciML tools for the equations in its set. In this introduction, we briefly present the relevant packages that facilitate parameter estimation, namely:

We also provide information regarding the respective strengths of these packages so that you can easily decide which one suits your needs best.

SciMLSensitivity.jl: Local Sensitivity Analysis and Automatic Differentiation Support for Solvers

SciMLSensitivity.jl is the system for local sensitivity, which all other inverse problem methods rely on. This package defines the interactions between the equation solvers and automatic differentiation, defining fast overloads for forward and adjoint (reverse) sensitivity analysis for fast gradient and Jacobian calculations with respect to model inputs. Its documentation covers how to use direct differentiation of equation solvers in conjunction with tools like Optimization.jl to perform model calibration of ODEs against data, PDE-constrained optimization, nonlinear optimal controls analysis, and much more. As a lower level tool, this library is very versatile, feature-rich, and high-performance, giving all the tools required but not directly providing a higher level interface.

Note

Sensitivity analysis is kept in a separate library from the solvers (SciMLSensitivity.jl), in order to not require all equation solvers to have a dependency on all automatic differentiation libraries. If automatic differentiation is applied to a solver library without importing SciMLSensitivity.jl, an error is thrown letting the user know to import SciMLSensitivity.jl for the functionality to exist.

DataDrivenDiffEq.jl: Data-Driven Modeling and Equation Discovery

The distinguishing feature of this package is that its ultimate goal is to identify the differential equation model that generated the input data. Depending on the user's needs, the package can provide structural identification of a given differential equation (output in a symbolic form) or structural estimation (output as a function for prediction purposes).

DiffEqParamEstim.jl: Simplified Parameter Estimation Interface

This package is for simplified parameter estimation. While not as flexible a system as DiffEqFlux.jl, it provides ready-made functions for doing standard optimization procedures like L2 fitting and MAP estimates. Among other features, it allows for the optimization of parameters in ODEs, stochastic problems, and delay differential equations.

DiffEqBayes.jl: Simplified Bayesian Estimation Interface

As the name suggests, this package has been designed to provide the estimation of differential equations parameters by Bayesian methods. It works in conjunction with Turing.jl, CmdStan.jl, DynamicHMC.jl, and ApproxBayes.jl. While not as flexible as direct usage of DiffEqFlux.jl or Turing.jl, DiffEqBayes.jl can be an approachable interface for those not familiar with Bayesian estimation, and provides a nice way to use Stan from pure Julia.

Third-Party Tools of Note

Turing.jl: A Flexible Probabilistic Programming Language for Bayesian Analysis

In the context of differential equations and parameter estimation, Turing.jl allows for a Bayesian estimation of differential equations (used in conjunction with the high-level package DiffEqBayes.jl). For more examples on combining Turing.jl with DiffEqBayes.jl, see the documentation below. It is important to note that Turing.jl can also perform Bayesian estimation without relying on DiffEqBayes.jl (for an example, consult this tutorial).

Topopt.jl: Topology Optimization in Julia

Topopt.jl solves topology optimization problems which are inverse problems on partial differential equations, solving for an optimal domain.

Recommended Automatic Differentiation Libraries

Solving inverse problems commonly requires using automatic differentiation (AD). SciML includes extensive support for automatic differentiation throughout its solvers, though some AD libraries are more tested than others. The following libraries are the current recommendations of the SciML developers.

ForwardDiff.jl: Operator-Overloading Forward Mode Automatic Differentiation

ForwardDiff.jl is a library for operator-overloading based forward-mode automatic differentiation. It's commonly used as the default method for generating Jacobians throughout the SciML solver libraries.

Note

Because ForwardDiff.jl uses an operator overloading approach, uses of ForwardDiff.jl require that any caches for non-allocating mutating code allows for Dual numbers. To allow such code to be ForwardDiff.jl-compatible, see PreallocationTools.jl.

Enzyme.jl: LLVM-Level Forward and Reverse Mode Automatic Differentiation

Enzyme.jl is an LLVM-level AD library for forward and reverse automatic differentiation. It supports many features required for high performance, such as being able to differentiate mutating code and to interleave compiler optimization with the AD passes. However, it does not support all of the Julia runtime, and thus some code with many dynamic behaviors and garbage collection (GC) invocations can be incompatible with Enzyme. Enzyme.jl is quickly becoming the new standard AD for SciML.

Zygote.jl: Julia-Level Source-to-Source Reverse Mode Automatic Differentiation

Zygote.jl is the current standard user-level reverse-mode automatic differentiation library for the SciML solvers. User-level means that many library tutorials, like in SciMLSensitivity.jl and DiffEqFlux.jl, showcase user code using Zygote.jl. This is because Zygote.jl is the AD engine associated with the Flux machine learning library. However, Zygote.jl has many limitations which limit its performance in equation solver contexts, such as an inability to handle mutation and the introduction of many small allocations and type-instabilities. For this reason, the SciML equation solvers define differentiation overloads using ChainRules.jl, meaning that the equation solvers tend not to use Zygote.jl internally even if the user code uses Zygote.gradient. In this manner, the speed and performance of more advanced techniques can be preserved while using the Julia standard.

FiniteDiff.jl: Fast Finite Difference Approximations

FiniteDiff.jl is the preferred fallback library for numerical differentiation and is commonly used by SciML solver libraries when automatic differentiation is disabled.

SparseDiffTools.jl: Tools for Fast Automatic Differentiation with Sparse Operators

SparseDiffTools.jl is a library for sparse automatic differentiation. It's used internally by many of the SciML equation solver libraries, which explicitly expose interfaces for colorvec color vectors generated by SparseDiffTools.jl's methods. SparseDiffTools.jl also includes many features useful to users, such as operators for matrix-free Jacobian-vector and Hessian-vector products.

+Parameter Estimation, Bayesian Analysis, and Inverse Problems · Overview of Julia's SciML

Parameter Estimation, Bayesian Analysis, and Inverse Problems

Parameter estimation for models and equations, also known as dynamic data analysis, solving the inverse problem, or Bayesian posterior estimation (when done probabilistically), is provided by the SciML tools for the equations in its set. In this introduction, we briefly present the relevant packages that facilitate parameter estimation, namely:

We also provide information regarding the respective strengths of these packages so that you can easily decide which one suits your needs best.

SciMLSensitivity.jl: Local Sensitivity Analysis and Automatic Differentiation Support for Solvers

SciMLSensitivity.jl is the system for local sensitivity, which all other inverse problem methods rely on. This package defines the interactions between the equation solvers and automatic differentiation, defining fast overloads for forward and adjoint (reverse) sensitivity analysis for fast gradient and Jacobian calculations with respect to model inputs. Its documentation covers how to use direct differentiation of equation solvers in conjunction with tools like Optimization.jl to perform model calibration of ODEs against data, PDE-constrained optimization, nonlinear optimal controls analysis, and much more. As a lower level tool, this library is very versatile, feature-rich, and high-performance, giving all the tools required but not directly providing a higher level interface.

Note

Sensitivity analysis is kept in a separate library from the solvers (SciMLSensitivity.jl), in order to not require all equation solvers to have a dependency on all automatic differentiation libraries. If automatic differentiation is applied to a solver library without importing SciMLSensitivity.jl, an error is thrown letting the user know to import SciMLSensitivity.jl for the functionality to exist.

DataDrivenDiffEq.jl: Data-Driven Modeling and Equation Discovery

The distinguishing feature of this package is that its ultimate goal is to identify the differential equation model that generated the input data. Depending on the user's needs, the package can provide structural identification of a given differential equation (output in a symbolic form) or structural estimation (output as a function for prediction purposes).

DiffEqParamEstim.jl: Simplified Parameter Estimation Interface

This package is for simplified parameter estimation. While not as flexible a system as DiffEqFlux.jl, it provides ready-made functions for doing standard optimization procedures like L2 fitting and MAP estimates. Among other features, it allows for the optimization of parameters in ODEs, stochastic problems, and delay differential equations.

DiffEqBayes.jl: Simplified Bayesian Estimation Interface

As the name suggests, this package has been designed to provide the estimation of differential equations parameters by Bayesian methods. It works in conjunction with Turing.jl, CmdStan.jl, DynamicHMC.jl, and ApproxBayes.jl. While not as flexible as direct usage of DiffEqFlux.jl or Turing.jl, DiffEqBayes.jl can be an approachable interface for those not familiar with Bayesian estimation, and provides a nice way to use Stan from pure Julia.

Third-Party Tools of Note

Turing.jl: A Flexible Probabilistic Programming Language for Bayesian Analysis

In the context of differential equations and parameter estimation, Turing.jl allows for a Bayesian estimation of differential equations (used in conjunction with the high-level package DiffEqBayes.jl). For more examples on combining Turing.jl with DiffEqBayes.jl, see the documentation below. It is important to note that Turing.jl can also perform Bayesian estimation without relying on DiffEqBayes.jl (for an example, consult this tutorial).

Topopt.jl: Topology Optimization in Julia

Topopt.jl solves topology optimization problems which are inverse problems on partial differential equations, solving for an optimal domain.

Recommended Automatic Differentiation Libraries

Solving inverse problems commonly requires using automatic differentiation (AD). SciML includes extensive support for automatic differentiation throughout its solvers, though some AD libraries are more tested than others. The following libraries are the current recommendations of the SciML developers.

ForwardDiff.jl: Operator-Overloading Forward Mode Automatic Differentiation

ForwardDiff.jl is a library for operator-overloading based forward-mode automatic differentiation. It's commonly used as the default method for generating Jacobians throughout the SciML solver libraries.

Note

Because ForwardDiff.jl uses an operator overloading approach, uses of ForwardDiff.jl require that any caches for non-allocating mutating code allows for Dual numbers. To allow such code to be ForwardDiff.jl-compatible, see PreallocationTools.jl.

Enzyme.jl: LLVM-Level Forward and Reverse Mode Automatic Differentiation

Enzyme.jl is an LLVM-level AD library for forward and reverse automatic differentiation. It supports many features required for high performance, such as being able to differentiate mutating code and to interleave compiler optimization with the AD passes. However, it does not support all of the Julia runtime, and thus some code with many dynamic behaviors and garbage collection (GC) invocations can be incompatible with Enzyme. Enzyme.jl is quickly becoming the new standard AD for SciML.

Zygote.jl: Julia-Level Source-to-Source Reverse Mode Automatic Differentiation

Zygote.jl is the current standard user-level reverse-mode automatic differentiation library for the SciML solvers. User-level means that many library tutorials, like in SciMLSensitivity.jl and DiffEqFlux.jl, showcase user code using Zygote.jl. This is because Zygote.jl is the AD engine associated with the Flux machine learning library. However, Zygote.jl has many limitations which limit its performance in equation solver contexts, such as an inability to handle mutation and the introduction of many small allocations and type-instabilities. For this reason, the SciML equation solvers define differentiation overloads using ChainRules.jl, meaning that the equation solvers tend not to use Zygote.jl internally even if the user code uses Zygote.gradient. In this manner, the speed and performance of more advanced techniques can be preserved while using the Julia standard.

FiniteDiff.jl: Fast Finite Difference Approximations

FiniteDiff.jl is the preferred fallback library for numerical differentiation and is commonly used by SciML solver libraries when automatic differentiation is disabled.

SparseDiffTools.jl: Tools for Fast Automatic Differentiation with Sparse Operators

SparseDiffTools.jl is a library for sparse automatic differentiation. It's used internally by many of the SciML equation solver libraries, which explicitly expose interfaces for colorvec color vectors generated by SparseDiffTools.jl's methods. SparseDiffTools.jl also includes many features useful to users, such as operators for matrix-free Jacobian-vector and Hessian-vector products.

diff --git a/dev/highlevels/learning_resources/index.html b/dev/highlevels/learning_resources/index.html index badbdc6635c..cd567cfec5d 100644 --- a/dev/highlevels/learning_resources/index.html +++ b/dev/highlevels/learning_resources/index.html @@ -1,2 +1,2 @@ -Curated Learning, Teaching, and Training Resources · Overview of Julia's SciML

Curated Learning, Teaching, and Training Resources

While the SciML documentation is made to be comprehensive, there will always be good alternative resources. The purpose of this section of the documentation is to highlight the alternative resources which can be helpful for learning how to use the SciML Open-Source Software libraries.

JuliaCon and SciMLCon Videos

Many tutorials and introductions to packages have been taught through previous JuliaCon/SciMLCon workshops and talks. The following is a curated list of such training videos:

SciML Book: Parallel Computing and Scientific Machine Learning (SciML): Methods and Applications

The book Parallel Computing and Scientific Machine Learning (SciML): Methods and Applications is a compilation of the lecture notes from the MIT Course 18.337J/6.338J: Parallel Computing and Scientific Machine Learning. It contains a walkthrough of many of the methods implemented in the SciML libraries, as well as how to understand much of the functionality at a deeper level. This course was intended for MIT graduate students in engineering, computer science, and mathematics and thus may have a higher prerequisite requirement than many other resources.

sir-julia: Various implementations of the classical SIR model in Julia

For those who like to learn by example, the repository sir-julia is a great resource! It showcases how to use the SciML libraries in many different ways to simulate different variations of the classic SIR epidemic model.

Other Books Featuring SciML

+Curated Learning, Teaching, and Training Resources · Overview of Julia's SciML

Curated Learning, Teaching, and Training Resources

While the SciML documentation is made to be comprehensive, there will always be good alternative resources. The purpose of this section of the documentation is to highlight the alternative resources which can be helpful for learning how to use the SciML Open-Source Software libraries.

JuliaCon and SciMLCon Videos

Many tutorials and introductions to packages have been taught through previous JuliaCon/SciMLCon workshops and talks. The following is a curated list of such training videos:

SciML Book: Parallel Computing and Scientific Machine Learning (SciML): Methods and Applications

The book Parallel Computing and Scientific Machine Learning (SciML): Methods and Applications is a compilation of the lecture notes from the MIT Course 18.337J/6.338J: Parallel Computing and Scientific Machine Learning. It contains a walkthrough of many of the methods implemented in the SciML libraries, as well as how to understand much of the functionality at a deeper level. This course was intended for MIT graduate students in engineering, computer science, and mathematics and thus may have a higher prerequisite requirement than many other resources.

sir-julia: Various implementations of the classical SIR model in Julia

For those who like to learn by example, the repository sir-julia is a great resource! It showcases how to use the SciML libraries in many different ways to simulate different variations of the classic SIR epidemic model.

Other Books Featuring SciML

diff --git a/dev/highlevels/model_libraries_and_importers/index.html b/dev/highlevels/model_libraries_and_importers/index.html index cf4584ea0a1..35fb0d87a93 100644 --- a/dev/highlevels/model_libraries_and_importers/index.html +++ b/dev/highlevels/model_libraries_and_importers/index.html @@ -1,2 +1,2 @@ -Model Libraries and Importers · Overview of Julia's SciML

Model Libraries and Importers

Models are passed on from generation to generation. Many models are not built from scratch but have a legacy of the known physics, biology, and chemistry embedded into them. Julia's SciML offers a range of pre-built modeling tools, from reusable acausal components to direct imports from common file formats.

ModelingToolkitStandardLibrary.jl: A Standard Library for ModelingToolkit

Given the composable nature of acausal modeling systems, it's helpful to not have to define every component from scratch and instead build off a common base of standard components. ModelingToolkitStandardLibrary.jl is that library. It provides components for standard models to start building everything from circuits and engines to robots.

DiffEqCallbacks.jl: Pre-Made Callbacks for DifferentialEquations.jl

DiffEqCallbacks.jl has many event handling and callback definitions which allow for quickly building up complex differential equation models. It includes:

  • Callbacks for specialized output and saving procedures
  • Callbacks for enforcing domain constraints, positivity, and manifolds
  • Timed callbacks for periodic dosing, presetting of tstops, and more
  • Callbacks for determining and terminating at steady state
  • Callbacks for controlling stepsizes and enforcing CFL conditions
  • Callbacks for quantifying uncertainty with respect to numerical errors

SBMLToolkit.jl: SBML Import

SBMLToolkit.jl is a library for reading SBML files into the standard formats for Catalyst.jl and ModelingToolkit.jl. There are well over one thousand biological models available in the BioModels Repository.

CellMLToolkit.jl: CellML Import

CellMLToolkit.jl is a library for reading CellML files into the standard formats for ModelingToolkit.jl. There are several hundred biological models available in the CellML Model Repository.

ReactionNetworkImporters.jl: BioNetGen Import

ReactionNetworkImporters.jl is a library for reading BioNetGen .net files and various stoichiometry matrix representations into the standard formats for Catalyst.jl and ModelingToolkit.jl.

+Model Libraries and Importers · Overview of Julia's SciML

Model Libraries and Importers

Models are passed on from generation to generation. Many models are not built from scratch but have a legacy of the known physics, biology, and chemistry embedded into them. Julia's SciML offers a range of pre-built modeling tools, from reusable acausal components to direct imports from common file formats.

ModelingToolkitStandardLibrary.jl: A Standard Library for ModelingToolkit

Given the composable nature of acausal modeling systems, it's helpful to not have to define every component from scratch and instead build off a common base of standard components. ModelingToolkitStandardLibrary.jl is that library. It provides components for standard models to start building everything from circuits and engines to robots.

DiffEqCallbacks.jl: Pre-Made Callbacks for DifferentialEquations.jl

DiffEqCallbacks.jl has many event handling and callback definitions which allow for quickly building up complex differential equation models. It includes:

  • Callbacks for specialized output and saving procedures
  • Callbacks for enforcing domain constraints, positivity, and manifolds
  • Timed callbacks for periodic dosing, presetting of tstops, and more
  • Callbacks for determining and terminating at steady state
  • Callbacks for controlling stepsizes and enforcing CFL conditions
  • Callbacks for quantifying uncertainty with respect to numerical errors

SBMLToolkit.jl: SBML Import

SBMLToolkit.jl is a library for reading SBML files into the standard formats for Catalyst.jl and ModelingToolkit.jl. There are well over one thousand biological models available in the BioModels Repository.

CellMLToolkit.jl: CellML Import

CellMLToolkit.jl is a library for reading CellML files into the standard formats for ModelingToolkit.jl. There are several hundred biological models available in the CellML Model Repository.

ReactionNetworkImporters.jl: BioNetGen Import

ReactionNetworkImporters.jl is a library for reading BioNetGen .net files and various stoichiometry matrix representations into the standard formats for Catalyst.jl and ModelingToolkit.jl.

diff --git a/dev/highlevels/modeling_languages/index.html b/dev/highlevels/modeling_languages/index.html index ab79b131301..2948e07a11e 100644 --- a/dev/highlevels/modeling_languages/index.html +++ b/dev/highlevels/modeling_languages/index.html @@ -1,2 +1,2 @@ -Modeling Languages · Overview of Julia's SciML

Modeling Languages

While in theory one can build perfect code for all models from scratch, in practice many scientists and engineers need or want some help! The SciML modeling tools provide a higher level interface over the equation solver, which helps the translation from good models to good simulations in a way that abstracts away the mathematical and computational details without giving up performance.

ModelingToolkit.jl: Acausal Symbolic Modeling

Acausal modeling is an extension of causal modeling that is more composable and allows for more code reuse. Build a model of an electric engine, then build a model of a battery, and now declare connections by stating "the voltage at the engine equals the voltage at the connector of the battery", and generate the composed model. The tool for this is ModelingToolkit.jl. ModelingToolkit.jl is a sophisticated symbolic modeling library which allows for specifying these types of large-scale differential equation models in a simple way, abstracting away the computational details. However, its symbolic analysis allows for generating much more performant code for differential-algebraic equations than most users could ever write by hand, with its structural_simplify automatically correcting the model to improve parallelism, numerical stability, and automatically remove variables which it can show are redundant.

ModelingToolkit.jl is the base of the SciML symbolic modeling ecosystem, defining the AbstractSystem types, such as ODESystem, SDESystem, OptimizationSystem, PDESystem, and more, which are then used by all the other modeling tools. As such, when using other modeling tools like Catalyst.jl, the reference for all the things that can be done with the symbolic representation is simply ModelingToolkit.jl.

Catalyst.jl: Chemical Reaction Networks (CRN), Systems Biology, and Quantitative Systems Pharmacology (QSP) Modeling

Catalyst.jl is a modeling interface for efficient simulation of mass action ODE, chemical Langevin SDE, and stochastic chemical kinetics jump process (i.e. chemical master equation) models for chemical reaction networks and population processes. It uses a highly intuitive chemical reaction syntax interface, which generates all the extra functionality necessary for the fastest use with JumpProcesses.jl, DifferentialEquations.jl, and higher level SciML libraries. Its ReactionSystem type is a programmable extension of the ModelingToolkit AbstractSystem interface, meaning that complex reaction systems are represented symbolically, and then compiled to optimized representations automatically when converting ReactionSystems to concrete ODE/SDE/jump process representations. Catalyst also provides functionality to support chemical reaction network and steady-state analysis.

For an overview of the library, see Modeling Biochemical Systems with Catalyst.jl - Samuel Isaacson

NBodySimulator.jl: A differentiable simulator for N-body problems, including astrophysical and molecular dynamics

NBodySimulator.jl is a differentiable simulator for N-body problems, including astrophysical and molecular dynamics. It uses the DifferentialEquations.jl solvers, allowing one to choose between a large variety of symplectic integration schemes. It implements many of the thermostats required for doing standard molecular dynamics approximations.

DiffEqFinancial.jl: Financial models for use in the DifferentialEquations ecosystem

The goal of DiffEqFinancial.jl is to be a feature-complete set of solvers for the types of problems found in libraries like QuantLib, such as the Heston process or the Black-Scholes model.

ParameterizedFunctions.jl: Simple Differential Equation Definitions Made Easy

This image that went viral is actually runnable code from ParameterizedFunctions.jl. Define equations and models using a very simple high-level syntax and let the code generation tools build symbolic fast Jacobian, gradient, etc. functions for you.

Third-Party Tools of Note

MomentClosure.jl: Automated Generation of Moment Closure Equations

MomentClosure.jl is a library for generating the moment closure equations for a given chemical master equation or stochastic differential equation. Thus instead of solving a stochastic model thousands of times to find the mean and variance, this library can generate the deterministic equations for how the mean and variance evolve in order to be solved in a single run. MomentClosure.jl uses Catalyst ReactionSystem and ModelingToolkit SDESystem types as the input for its symbolic generation processes.

Agents.jl: Agent-Based Modeling Framework in Julia

If one wants to do agent-based modeling in Julia, Agents.jl is the go-to library. It's fast and flexible, making it a solid foundation for any agent-based model.

Unitful.jl: A Julia package for physical units

Supports not only SI units, but also any other unit system. Unitful.jl has minimal run-time penalty of units. Includes facilities for dimensional analysis, and integrates easily with the usual mathematical operations and collections that are defined in Julia.

ReactionMechanismSimulator.jl: Simulation and Analysis of Large Chemical Reaction Systems

ReactionMechanismSimulator.jl is a tool for simulating and analyzing large chemical reaction mechanisms. It interfaces with the ReactionMechanismGenerator suite for automatically constructing reaction pathways from chemical components to quickly build realistic models of chemical systems.

FiniteStateProjection.jl: Direct Solution of Chemical Master Equations

FiniteStateProjection.jl is a library for finite state projection direct solving of the chemical master equation. It automatically converts the Catalyst ReactionSystem definitions into ModelingToolkit ODESystem representations for the evolution of probability distributions to allow for directly solving the weak form of the stochastic model.

AlgebraicPetri.jl: Applied Category Theory of Modeling

AlgebraicPetri.jl is a library for automating the intuitive generation of dynamical models using a Category theory-based approach.

QuantumOptics.jl: Simulating quantum systems.

QuantumOptics.jl makes it easy to simulate various kinds of quantum systems. It is inspired by the Quantum Optics Toolbox for MATLAB and the Python framework QuTiP.

+Modeling Languages · Overview of Julia's SciML

Modeling Languages

While in theory one can build perfect code for all models from scratch, in practice many scientists and engineers need or want some help! The SciML modeling tools provide a higher level interface over the equation solver, which helps the translation from good models to good simulations in a way that abstracts away the mathematical and computational details without giving up performance.

ModelingToolkit.jl: Acausal Symbolic Modeling

Acausal modeling is an extension of causal modeling that is more composable and allows for more code reuse. Build a model of an electric engine, then build a model of a battery, and now declare connections by stating "the voltage at the engine equals the voltage at the connector of the battery", and generate the composed model. The tool for this is ModelingToolkit.jl. ModelingToolkit.jl is a sophisticated symbolic modeling library which allows for specifying these types of large-scale differential equation models in a simple way, abstracting away the computational details. However, its symbolic analysis allows for generating much more performant code for differential-algebraic equations than most users could ever write by hand, with its structural_simplify automatically correcting the model to improve parallelism, numerical stability, and automatically remove variables which it can show are redundant.

ModelingToolkit.jl is the base of the SciML symbolic modeling ecosystem, defining the AbstractSystem types, such as ODESystem, SDESystem, OptimizationSystem, PDESystem, and more, which are then used by all the other modeling tools. As such, when using other modeling tools like Catalyst.jl, the reference for all the things that can be done with the symbolic representation is simply ModelingToolkit.jl.

Catalyst.jl: Chemical Reaction Networks (CRN), Systems Biology, and Quantitative Systems Pharmacology (QSP) Modeling

Catalyst.jl is a modeling interface for efficient simulation of mass action ODE, chemical Langevin SDE, and stochastic chemical kinetics jump process (i.e. chemical master equation) models for chemical reaction networks and population processes. It uses a highly intuitive chemical reaction syntax interface, which generates all the extra functionality necessary for the fastest use with JumpProcesses.jl, DifferentialEquations.jl, and higher level SciML libraries. Its ReactionSystem type is a programmable extension of the ModelingToolkit AbstractSystem interface, meaning that complex reaction systems are represented symbolically, and then compiled to optimized representations automatically when converting ReactionSystems to concrete ODE/SDE/jump process representations. Catalyst also provides functionality to support chemical reaction network and steady-state analysis.

For an overview of the library, see Modeling Biochemical Systems with Catalyst.jl - Samuel Isaacson

NBodySimulator.jl: A differentiable simulator for N-body problems, including astrophysical and molecular dynamics

NBodySimulator.jl is a differentiable simulator for N-body problems, including astrophysical and molecular dynamics. It uses the DifferentialEquations.jl solvers, allowing one to choose between a large variety of symplectic integration schemes. It implements many of the thermostats required for doing standard molecular dynamics approximations.

DiffEqFinancial.jl: Financial models for use in the DifferentialEquations ecosystem

The goal of DiffEqFinancial.jl is to be a feature-complete set of solvers for the types of problems found in libraries like QuantLib, such as the Heston process or the Black-Scholes model.

ParameterizedFunctions.jl: Simple Differential Equation Definitions Made Easy

This image that went viral is actually runnable code from ParameterizedFunctions.jl. Define equations and models using a very simple high-level syntax and let the code generation tools build symbolic fast Jacobian, gradient, etc. functions for you.

Third-Party Tools of Note

MomentClosure.jl: Automated Generation of Moment Closure Equations

MomentClosure.jl is a library for generating the moment closure equations for a given chemical master equation or stochastic differential equation. Thus instead of solving a stochastic model thousands of times to find the mean and variance, this library can generate the deterministic equations for how the mean and variance evolve in order to be solved in a single run. MomentClosure.jl uses Catalyst ReactionSystem and ModelingToolkit SDESystem types as the input for its symbolic generation processes.

Agents.jl: Agent-Based Modeling Framework in Julia

If one wants to do agent-based modeling in Julia, Agents.jl is the go-to library. It's fast and flexible, making it a solid foundation for any agent-based model.

Unitful.jl: A Julia package for physical units

Supports not only SI units, but also any other unit system. Unitful.jl has minimal run-time penalty of units. Includes facilities for dimensional analysis, and integrates easily with the usual mathematical operations and collections that are defined in Julia.

ReactionMechanismSimulator.jl: Simulation and Analysis of Large Chemical Reaction Systems

ReactionMechanismSimulator.jl is a tool for simulating and analyzing large chemical reaction mechanisms. It interfaces with the ReactionMechanismGenerator suite for automatically constructing reaction pathways from chemical components to quickly build realistic models of chemical systems.

FiniteStateProjection.jl: Direct Solution of Chemical Master Equations

FiniteStateProjection.jl is a library for finite state projection direct solving of the chemical master equation. It automatically converts the Catalyst ReactionSystem definitions into ModelingToolkit ODESystem representations for the evolution of probability distributions to allow for directly solving the weak form of the stochastic model.

AlgebraicPetri.jl: Applied Category Theory of Modeling

AlgebraicPetri.jl is a library for automating the intuitive generation of dynamical models using a Category theory-based approach.

QuantumOptics.jl: Simulating quantum systems.

QuantumOptics.jl makes it easy to simulate various kinds of quantum systems. It is inspired by the Quantum Optics Toolbox for MATLAB and the Python framework QuTiP.

diff --git a/dev/highlevels/numerical_utilities/index.html b/dev/highlevels/numerical_utilities/index.html index 8ec765420f3..416454f1b48 100644 --- a/dev/highlevels/numerical_utilities/index.html +++ b/dev/highlevels/numerical_utilities/index.html @@ -1,2 +1,2 @@ -SciML Numerical Utility Libraries · Overview of Julia's SciML

SciML Numerical Utility Libraries

ExponentialUtilities.jl: Faster Matrix Exponentials

ExponentialUtilities.jl is a library for efficient computation of matrix exponentials. While Julia has a built-in exp(A) method, ExponentialUtilities.jl offers many features around this to improve performance in scientific contexts, including:

  • Faster methods for (non-allocating) matrix exponentials via exponential!
  • Methods for computing matrix exponential that are generic to number types and arrays (e.g. GPUs)
  • Methods for computing Arnoldi iterations on Krylov subspaces
  • Direct computation of exp(t*A)*v, i.e. exponentiation of a matrix times a vector, without computing the matrix exponential
  • Direct computation of ϕ_m(t*A)*v operations, where ϕ_0(z) = exp(z) and ϕ_(k+1)(z) = (ϕ_k(z) - 1/k!) / z

ExponentialUtilities.jl includes complex adaptive time stepping techniques such as KIOPS in order to perform these calculations in a fast and numerically-stable way.

QuasiMonteCarlo.jl: Fast Quasi-Random Number Generation

QuasiMonteCarlo.jl is a library for fast generation of low discrepancy Quasi-Monte Carlo samples, using methods like:

  • GridSample(dx) where the grid is given by lb:dx[i]:ub in the ith direction.
  • UniformSample for uniformly distributed random numbers.
  • SobolSample for the Sobol sequence.
  • LatinHypercubeSample for a Latin Hypercube.
  • LatticeRuleSample for a randomly-shifted rank-1 lattice rule.
  • LowDiscrepancySample(base) where base[i] is the base in the ith direction.
  • GoldenSample for a Golden Ratio sequence.
  • KroneckerSample(alpha, s0) for a Kronecker sequence, where alpha is a length-d vector of irrational numbers (often sqrt(d)) and s0 is a length-d seed vector (often 0).
  • SectionSample(x0, sampler) where sampler is any sampler above and x0 is a vector of either NaN for a free dimension or some scalar for a constrained dimension.

DataInterpolations.jl: One-Dimensional Interpolations

DataInterpolations.jl is a library of one-dimensional interpolation schemes which are composable with automatic differentiation and the SciML ecosystem. It includes direct interpolation methods and regression techniques for handling noisy data. Its methods include:

  • ConstantInterpolation(u,t) - A piecewise constant interpolation.

  • LinearInterpolation(u,t) - A linear interpolation.

  • QuadraticInterpolation(u,t) - A quadratic interpolation.

  • LagrangeInterpolation(u,t,n) - A Lagrange interpolation of order n.

  • QuadraticSpline(u,t) - A quadratic spline interpolation.

  • CubicSpline(u,t) - A cubic spline interpolation.

  • BSplineInterpolation(u,t,d,pVec,knotVec) - An interpolation B-spline. This is a B-spline which hits each of the data points. The argument choices are:

    • d - degree of B-spline
    • pVec - Symbol to Parameters Vector, pVec = :Uniform for uniform spaced parameters and pVec = :ArcLen for parameters generated by chord length method.
    • knotVec - Symbol to Knot Vector, knotVec = :Uniform for uniform knot vector, knotVec = :Average for average spaced knot vector.
  • BSplineApprox(u,t,d,h,pVec,knotVec) - A regression B-spline which smooths the fitting curve. The argument choices are the same as the BSplineInterpolation, with the additional parameter h<length(t) which is the number of control points to use, with smaller h indicating more smoothing.

  • Curvefit(u,t,m,p,alg) - An interpolation which is done by fitting a user-given functional form m(t,p) where p is the vector of parameters. The user's input p is an initial value for a least-square fitting, alg is the algorithm choice used to optimize the cost function (sum of squared deviations) via Optim.jl and optimal ps are used in the interpolation.

These interpolations match the SciML interfaces and have direct support for packages like ModelingToolkit.jl.

PoissonRandom.jl: Fast Poisson Random Number Generation

PoissonRandom.jl is just fast Poisson random number generation for Poisson processes, like chemical master equations.

PreallocationTools.jl: Write Non-Allocating Code Easier

PreallocationTools.jl is a library of tools for writing non-allocating code that interacts well with advanced features like automatic differentiation and symbolics.

RuntimeGeneratedFunctions.jl: Efficient Staged Programming in Julia

RuntimeGeneratedFunctions.jl allows for staged programming in Julia, compiling functions at runtime with full optimizations. This is used by many libraries such as ModelingToolkit.jl to allow for runtime code generation for improved performance.

EllipsisNotation.jl: Implementation of Ellipsis Array Slicing

EllipsisNotation.jl defines the ellipsis array slicing notation for Julia. It uses .. as a catch-all for “all dimensions”, allowing for indexing like [..,1] to mean [:,:,:,1] on four dimensional arrays, in a way that is generic to the number of dimensions in the underlying array.

Third-Party Libraries to Note

Distributions.jl: Representations of Probability Distributions

Distributions.jl is a library for defining distributions in Julia. It's used all throughout the SciML libraries for specifications of probability distributions.

Note

For full compatibility with automatic differentiation, see DistributionsAD.jl

FFTW.jl: Fastest Fourier Transformation in the West

FFTW.jl is the preferred library for fast Fourier Transformations on the CPU.

SpecialFunctions.jl: Implementations of Mathematical Special Functions

SpecialFunctions.jl is a library of implementations of special functions, like Bessel functions and error functions (erf). This library is compatible with automatic differentiation.

LoopVectorization.jl: Automated Loop Accelerator

LoopVectorization.jl is a library which provides the @turbo and @tturbo macros for accelerating the computation of loops. This can be used to accelerate the model functions sent to the equation solvers, for example, accelerating handwritten PDE discretizations.

Polyester.jl: Cheap Threads

Polyester.jl is a cheaper version of threads for Julia, which use a set pool of threads for lower overhead. Note that Polyester does not compose with the standard Julia composable threading infrastructure, and thus one must take care not to compose two levels of Polyester, as this will oversubscribe the computation and lead to performance degradation. Many SciML solvers have options to use Polyester for threading to achieve the top performance.

Tullio.jl: Fast Tensor Calculations and Einstein Notation

Tullio.jl is a library for fast tensor calculations with Einstein notation. It allows for defining operations which are compatible with automatic differentiation, GPUs, and more.

ParallelStencil.jl: High-Level Code for Parallelized Stencil Computations

ParallelStencil.jl is a library for writing high-level code for parallelized stencil computations. It is compatible with SciML equation solvers and is thus a good way to generate GPU and distributed parallel model code.

Julia Utilities

StaticCompiler.jl

StaticCompiler.jl is a package for generating static binaries from Julia code. It only supports a subset of Julia, so not all equation solver algorithms are compatible with StaticCompiler.jl.

PackageCompiler.jl

PackageCompiler.jl is a package for generating shared libraries from Julia code. It builds the entirety of Julia by bundling a system image with the Julia runtime. It thus builds complete binaries that can hold all the functionality of SciML. Furthermore, it can also be used to generate new system images to decrease startup times and remove JIT-compilation from SciML usage.

+SciML Numerical Utility Libraries · Overview of Julia's SciML

SciML Numerical Utility Libraries

ExponentialUtilities.jl: Faster Matrix Exponentials

ExponentialUtilities.jl is a library for efficient computation of matrix exponentials. While Julia has a built-in exp(A) method, ExponentialUtilities.jl offers many features around this to improve performance in scientific contexts, including:

  • Faster methods for (non-allocating) matrix exponentials via exponential!
  • Methods for computing matrix exponential that are generic to number types and arrays (e.g. GPUs)
  • Methods for computing Arnoldi iterations on Krylov subspaces
  • Direct computation of exp(t*A)*v, i.e. exponentiation of a matrix times a vector, without computing the matrix exponential
  • Direct computation of ϕ_m(t*A)*v operations, where ϕ_0(z) = exp(z) and ϕ_(k+1)(z) = (ϕ_k(z) - 1/k!) / z

ExponentialUtilities.jl includes complex adaptive time stepping techniques such as KIOPS in order to perform these calculations in a fast and numerically-stable way.

QuasiMonteCarlo.jl: Fast Quasi-Random Number Generation

QuasiMonteCarlo.jl is a library for fast generation of low discrepancy Quasi-Monte Carlo samples, using methods like:

  • GridSample(dx) where the grid is given by lb:dx[i]:ub in the ith direction.
  • UniformSample for uniformly distributed random numbers.
  • SobolSample for the Sobol sequence.
  • LatinHypercubeSample for a Latin Hypercube.
  • LatticeRuleSample for a randomly-shifted rank-1 lattice rule.
  • LowDiscrepancySample(base) where base[i] is the base in the ith direction.
  • GoldenSample for a Golden Ratio sequence.
  • KroneckerSample(alpha, s0) for a Kronecker sequence, where alpha is a length-d vector of irrational numbers (often sqrt(d)) and s0 is a length-d seed vector (often 0).
  • SectionSample(x0, sampler) where sampler is any sampler above and x0 is a vector of either NaN for a free dimension or some scalar for a constrained dimension.

DataInterpolations.jl: One-Dimensional Interpolations

DataInterpolations.jl is a library of one-dimensional interpolation schemes which are composable with automatic differentiation and the SciML ecosystem. It includes direct interpolation methods and regression techniques for handling noisy data. Its methods include:

  • ConstantInterpolation(u,t) - A piecewise constant interpolation.

  • LinearInterpolation(u,t) - A linear interpolation.

  • QuadraticInterpolation(u,t) - A quadratic interpolation.

  • LagrangeInterpolation(u,t,n) - A Lagrange interpolation of order n.

  • QuadraticSpline(u,t) - A quadratic spline interpolation.

  • CubicSpline(u,t) - A cubic spline interpolation.

  • BSplineInterpolation(u,t,d,pVec,knotVec) - An interpolation B-spline. This is a B-spline which hits each of the data points. The argument choices are:

    • d - degree of B-spline
    • pVec - Symbol to Parameters Vector, pVec = :Uniform for uniform spaced parameters and pVec = :ArcLen for parameters generated by chord length method.
    • knotVec - Symbol to Knot Vector, knotVec = :Uniform for uniform knot vector, knotVec = :Average for average spaced knot vector.
  • BSplineApprox(u,t,d,h,pVec,knotVec) - A regression B-spline which smooths the fitting curve. The argument choices are the same as the BSplineInterpolation, with the additional parameter h<length(t) which is the number of control points to use, with smaller h indicating more smoothing.

  • Curvefit(u,t,m,p,alg) - An interpolation which is done by fitting a user-given functional form m(t,p) where p is the vector of parameters. The user's input p is an initial value for a least-square fitting, alg is the algorithm choice used to optimize the cost function (sum of squared deviations) via Optim.jl and optimal ps are used in the interpolation.

These interpolations match the SciML interfaces and have direct support for packages like ModelingToolkit.jl.

PoissonRandom.jl: Fast Poisson Random Number Generation

PoissonRandom.jl is just fast Poisson random number generation for Poisson processes, like chemical master equations.

PreallocationTools.jl: Write Non-Allocating Code Easier

PreallocationTools.jl is a library of tools for writing non-allocating code that interacts well with advanced features like automatic differentiation and symbolics.

RuntimeGeneratedFunctions.jl: Efficient Staged Programming in Julia

RuntimeGeneratedFunctions.jl allows for staged programming in Julia, compiling functions at runtime with full optimizations. This is used by many libraries such as ModelingToolkit.jl to allow for runtime code generation for improved performance.

EllipsisNotation.jl: Implementation of Ellipsis Array Slicing

EllipsisNotation.jl defines the ellipsis array slicing notation for Julia. It uses .. as a catch-all for “all dimensions”, allowing for indexing like [..,1] to mean [:,:,:,1] on four dimensional arrays, in a way that is generic to the number of dimensions in the underlying array.

Third-Party Libraries to Note

Distributions.jl: Representations of Probability Distributions

Distributions.jl is a library for defining distributions in Julia. It's used all throughout the SciML libraries for specifications of probability distributions.

Note

For full compatibility with automatic differentiation, see DistributionsAD.jl

FFTW.jl: Fastest Fourier Transformation in the West

FFTW.jl is the preferred library for fast Fourier Transformations on the CPU.

SpecialFunctions.jl: Implementations of Mathematical Special Functions

SpecialFunctions.jl is a library of implementations of special functions, like Bessel functions and error functions (erf). This library is compatible with automatic differentiation.

LoopVectorization.jl: Automated Loop Accelerator

LoopVectorization.jl is a library which provides the @turbo and @tturbo macros for accelerating the computation of loops. This can be used to accelerate the model functions sent to the equation solvers, for example, accelerating handwritten PDE discretizations.

Polyester.jl: Cheap Threads

Polyester.jl is a cheaper version of threads for Julia, which use a set pool of threads for lower overhead. Note that Polyester does not compose with the standard Julia composable threading infrastructure, and thus one must take care not to compose two levels of Polyester, as this will oversubscribe the computation and lead to performance degradation. Many SciML solvers have options to use Polyester for threading to achieve the top performance.

Tullio.jl: Fast Tensor Calculations and Einstein Notation

Tullio.jl is a library for fast tensor calculations with Einstein notation. It allows for defining operations which are compatible with automatic differentiation, GPUs, and more.

ParallelStencil.jl: High-Level Code for Parallelized Stencil Computations

ParallelStencil.jl is a library for writing high-level code for parallelized stencil computations. It is compatible with SciML equation solvers and is thus a good way to generate GPU and distributed parallel model code.

Julia Utilities

StaticCompiler.jl

StaticCompiler.jl is a package for generating static binaries from Julia code. It only supports a subset of Julia, so not all equation solver algorithms are compatible with StaticCompiler.jl.

PackageCompiler.jl

PackageCompiler.jl is a package for generating shared libraries from Julia code. It builds the entirety of Julia by bundling a system image with the Julia runtime. It thus builds complete binaries that can hold all the functionality of SciML. Furthermore, it can also be used to generate new system images to decrease startup times and remove JIT-compilation from SciML usage.

diff --git a/dev/highlevels/parameter_analysis/index.html b/dev/highlevels/parameter_analysis/index.html index 99a84d8dbfc..6d59deaa755 100644 --- a/dev/highlevels/parameter_analysis/index.html +++ b/dev/highlevels/parameter_analysis/index.html @@ -1,2 +1,2 @@ -Parameter Analysis Utilities · Overview of Julia's SciML

Parameter Analysis Utilities

GlobalSensitivity.jl: Global Sensitivity Analysis

Derivatives calculate the local sensitivity of a model, i.e. the change in the simulation's outcome if one were to change the parameter with respect to some chosen part of the parameter space. But how does a simulation's output change “in general” with respect to a given parameter? That is what global sensitivity analysis (GSA) computes, and thus GlobalSensitivity.jl is the way to answer that question. GlobalSensitivity.jl includes a wide array of methods, including:

  • Morris's method
  • Sobol's method
  • Regression methods (PCC, SRC, Pearson)
  • eFAST
  • Delta Moment-Independent method
  • Derivative-based Global Sensitivity Measures (DGSM)
  • EASI
  • Fractional Factorial method
  • Random Balance Design FAST method

StructuralIdentifiability.jl: Identifiability Analysis Made Simple

Performing parameter estimation from a data set means attempting to recover parameters like reaction rates by fitting some model to the data. But how do you know whether you have enough data to even consider getting the “correct” parameters back? StructuralIdentifiability.jl allows for running a structural identifiability analysis on a given model to determine whether it's theoretically possible to recover the correct parameters. It can state whether a given type of output data can be used to globally recover the parameters (i.e. only a unique parameter set for the model produces a given output), whether the parameters are only locally identifiable (i.e. there are finitely many parameter sets which could generate the seen data), or whether it's unidentifiable (there are infinitely many parameters which generate the same output data).

For more information on what StructuralIdentifiability.jl is all about, see the SciMLCon 2022 tutorial video.

MinimallyDisruptiveCurves.jl

MinimallyDisruptiveCurves.jl is a library for finding relationships between parameters of models, finding the curves on which the solution is constant.

Third-Party Libraries to Note

SIAN.jl: Structural Identifiability Analyzer

SIAN.jl is a structural identifiability analysis package which uses an entirely different algorithm from StructuralIdentifiability.jl. For information on the differences between the two approaches, see the Structural Identifiability Tools in Julia tutorial.

DynamicalSystems.jl: A Suite of Dynamical Systems Analysis

DynamicalSystems.jl is an entire ecosystem of dynamical systems analysis methods, for computing measures of chaos (dimension estimation, Lyapunov coefficients), generating delay embeddings, and much more. It uses the SciML tools for its internal equation solving and thus shares much of its API, adding a layer of new tools for extended analyses.

For more information, watch the tutorial Introduction to DynamicalSystems.jl.

BifurcationKit.jl

BifurcationKit.jl is a tool for performing bifurcation analysis. It uses and composes with many SciML equation solvers.

ReachabilityAnalysis.jl

ReachabilityAnalysis.jl is a library for performing reachability analysis of dynamical systems, determining for a given uncertainty interval the full set of possible outcomes from a dynamical system.

ControlSystems.jl

ControlSystems.jl is a library for building and analyzing control systems.

+Parameter Analysis Utilities · Overview of Julia's SciML

Parameter Analysis Utilities

GlobalSensitivity.jl: Global Sensitivity Analysis

Derivatives calculate the local sensitivity of a model, i.e. the change in the simulation's outcome if one were to change the parameter with respect to some chosen part of the parameter space. But how does a simulation's output change “in general” with respect to a given parameter? That is what global sensitivity analysis (GSA) computes, and thus GlobalSensitivity.jl is the way to answer that question. GlobalSensitivity.jl includes a wide array of methods, including:

  • Morris's method
  • Sobol's method
  • Regression methods (PCC, SRC, Pearson)
  • eFAST
  • Delta Moment-Independent method
  • Derivative-based Global Sensitivity Measures (DGSM)
  • EASI
  • Fractional Factorial method
  • Random Balance Design FAST method

StructuralIdentifiability.jl: Identifiability Analysis Made Simple

Performing parameter estimation from a data set means attempting to recover parameters like reaction rates by fitting some model to the data. But how do you know whether you have enough data to even consider getting the “correct” parameters back? StructuralIdentifiability.jl allows for running a structural identifiability analysis on a given model to determine whether it's theoretically possible to recover the correct parameters. It can state whether a given type of output data can be used to globally recover the parameters (i.e. only a unique parameter set for the model produces a given output), whether the parameters are only locally identifiable (i.e. there are finitely many parameter sets which could generate the seen data), or whether it's unidentifiable (there are infinitely many parameters which generate the same output data).

For more information on what StructuralIdentifiability.jl is all about, see the SciMLCon 2022 tutorial video.

MinimallyDisruptiveCurves.jl

MinimallyDisruptiveCurves.jl is a library for finding relationships between parameters of models, finding the curves on which the solution is constant.

Third-Party Libraries to Note

SIAN.jl: Structural Identifiability Analyzer

SIAN.jl is a structural identifiability analysis package which uses an entirely different algorithm from StructuralIdentifiability.jl. For information on the differences between the two approaches, see the Structural Identifiability Tools in Julia tutorial.

DynamicalSystems.jl: A Suite of Dynamical Systems Analysis

DynamicalSystems.jl is an entire ecosystem of dynamical systems analysis methods, for computing measures of chaos (dimension estimation, Lyapunov coefficients), generating delay embeddings, and much more. It uses the SciML tools for its internal equation solving and thus shares much of its API, adding a layer of new tools for extended analyses.

For more information, watch the tutorial Introduction to DynamicalSystems.jl.

BifurcationKit.jl

BifurcationKit.jl is a tool for performing bifurcation analysis. It uses and composes with many SciML equation solvers.

ReachabilityAnalysis.jl

ReachabilityAnalysis.jl is a library for performing reachability analysis of dynamical systems, determining for a given uncertainty interval the full set of possible outcomes from a dynamical system.

ControlSystems.jl

ControlSystems.jl is a library for building and analyzing control systems.

diff --git a/dev/highlevels/partial_differential_equation_solvers/index.html b/dev/highlevels/partial_differential_equation_solvers/index.html index c46ae54d9d5..3bfe6893198 100644 --- a/dev/highlevels/partial_differential_equation_solvers/index.html +++ b/dev/highlevels/partial_differential_equation_solvers/index.html @@ -1,2 +1,2 @@ -Partial Differential Equations (PDE) · Overview of Julia's SciML

Partial Differential Equations (PDE)

NeuralPDE.jl: Physics-Informed Neural Network (PINN) PDE Solvers

NeuralPDE.jl is a partial differential equation solver library which uses physics-informed neural networks (PINNs) to solve the equations. It uses the ModelingToolkit.jl symbolic PDESystem as its input and can handle a wide variety of equation types, including systems of partial differential equations, partial differential-algebraic equations, and integro-differential equations. Its benefit is its flexibility, and it can be used to easily generate surrogate solutions over entire parameter ranges. However, its downside is solver speed: PINN solvers tend to be a lot slower than other methods for solving PDEs.

MethodOfLines.jl: Automated Finite Difference Method (FDM)

MethodOfLines.jl is a partial differential equation solver library which automates the discretization of PDEs via the finite difference method. It uses the ModelingToolkit.jl symbolic PDESystem as its input, and generates AbstractSystems and SciMLProblems whose numerical solution gives the solution to the PDE.

FEniCS.jl: Wrappers for the Finite Element Method (FEM)

FEniCS.jl is a wrapper for the popular FEniCS finite element method library.

HighDimPDE.jl: High-dimensional PDE Solvers

HighDimPDE.jl is a partial differential equation solver library which implements algorithms that break down the curse of dimensionality to solve the equations. It implements deep-learning based and Picard-iteration based methods to approximately solve high-dimensional, nonlinear, non-local PDEs in up to 10,000 dimensions. Its cons are accuracy: high-dimensional solvers are stochastic, and might result in wrong solutions if the solver meta-parameters are not appropriate.

NeuralOperators.jl: (Fourier) Neural Operators and DeepONets for PDE Solving

NeuralOperators.jl is a library for operator learning based PDE solvers. This includes techniques like:

  • Fourier Neural Operators (FNO)
  • Deep Operator Networks (DeepONets)
  • Markov Neural Operators (MNO)

Currently, its connection to PDE solving must be specified manually, though an interface for ModelingToolkit PDESystems is in progress.

DiffEqOperators.jl: Operators for Finite Difference Method (FDM) Discretizations

DiffEqOperators.jl is a library for defining finite difference operators to easily perform manual FDM semi-discretizations of partial differential equations. This library is fairly incomplete, and most use cases will receive better performance using MethodOfLines.jl.

Third-Party Libraries to Note

ApproxFun.jl: Automated Spectral Discretizations

ApproxFun.jl is a package for approximating functions in basis sets. One particular use case is with spectral basis sets, such as Chebyshev functions and Fourier decompositions, making it easy to represent spectral and pseudospectral discretizations of partial differential equations as ordinary differential equations for the SciML equation solvers.

Gridap.jl: Julia-Based Tools for Finite Element Discretizations

Gridap.jl is a package for grid-based approximation of partial differential equations, particularly notable for its use of conforming and nonconforming finite element (FEM) discretizations.

Trixi.jl: Adaptive High-Order Numerical Simulations of Hyperbolic Equations

Trixi.jl is a package for numerical simulation of hyperbolic conservation laws, i.e. a large set of hyperbolic partial differential equations, which interfaces and uses the SciML ordinary differential equation solvers.

VoronoiFVM.jl: Tools for the Voronoi Finite Volume Discretizations

VoronoiFVM.jl is a library for generating FVM discretizations of systems of PDEs. It interfaces with many of the SciML equation solver libraries to allow for ease of discretization and flexibility in the solver choice.

+Partial Differential Equations (PDE) · Overview of Julia's SciML

Partial Differential Equations (PDE)

NeuralPDE.jl: Physics-Informed Neural Network (PINN) PDE Solvers

NeuralPDE.jl is a partial differential equation solver library which uses physics-informed neural networks (PINNs) to solve the equations. It uses the ModelingToolkit.jl symbolic PDESystem as its input and can handle a wide variety of equation types, including systems of partial differential equations, partial differential-algebraic equations, and integro-differential equations. Its benefit is its flexibility, and it can be used to easily generate surrogate solutions over entire parameter ranges. However, its downside is solver speed: PINN solvers tend to be a lot slower than other methods for solving PDEs.

MethodOfLines.jl: Automated Finite Difference Method (FDM)

MethodOfLines.jl is a partial differential equation solver library which automates the discretization of PDEs via the finite difference method. It uses the ModelingToolkit.jl symbolic PDESystem as its input, and generates AbstractSystems and SciMLProblems whose numerical solution gives the solution to the PDE.

FEniCS.jl: Wrappers for the Finite Element Method (FEM)

FEniCS.jl is a wrapper for the popular FEniCS finite element method library.

HighDimPDE.jl: High-dimensional PDE Solvers

HighDimPDE.jl is a partial differential equation solver library which implements algorithms that break down the curse of dimensionality to solve the equations. It implements deep-learning based and Picard-iteration based methods to approximately solve high-dimensional, nonlinear, non-local PDEs in up to 10,000 dimensions. Its cons are accuracy: high-dimensional solvers are stochastic, and might result in wrong solutions if the solver meta-parameters are not appropriate.

NeuralOperators.jl: (Fourier) Neural Operators and DeepONets for PDE Solving

NeuralOperators.jl is a library for operator learning based PDE solvers. This includes techniques like:

  • Fourier Neural Operators (FNO)
  • Deep Operator Networks (DeepONets)
  • Markov Neural Operators (MNO)

Currently, its connection to PDE solving must be specified manually, though an interface for ModelingToolkit PDESystems is in progress.

DiffEqOperators.jl: Operators for Finite Difference Method (FDM) Discretizations

DiffEqOperators.jl is a library for defining finite difference operators to easily perform manual FDM semi-discretizations of partial differential equations. This library is fairly incomplete, and most use cases will receive better performance using MethodOfLines.jl.

Third-Party Libraries to Note

ApproxFun.jl: Automated Spectral Discretizations

ApproxFun.jl is a package for approximating functions in basis sets. One particular use case is with spectral basis sets, such as Chebyshev functions and Fourier decompositions, making it easy to represent spectral and pseudospectral discretizations of partial differential equations as ordinary differential equations for the SciML equation solvers.

Gridap.jl: Julia-Based Tools for Finite Element Discretizations

Gridap.jl is a package for grid-based approximation of partial differential equations, particularly notable for its use of conforming and nonconforming finite element (FEM) discretizations.

Trixi.jl: Adaptive High-Order Numerical Simulations of Hyperbolic Equations

Trixi.jl is a package for numerical simulation of hyperbolic conservation laws, i.e. a large set of hyperbolic partial differential equations, which interfaces and uses the SciML ordinary differential equation solvers.

VoronoiFVM.jl: Tools for the Voronoi Finite Volume Discretizations

VoronoiFVM.jl is a library for generating FVM discretizations of systems of PDEs. It interfaces with many of the SciML equation solver libraries to allow for ease of discretization and flexibility in the solver choice.

diff --git a/dev/highlevels/plots_visualization/index.html b/dev/highlevels/plots_visualization/index.html index 5745b62ec76..2a899135f9b 100644 --- a/dev/highlevels/plots_visualization/index.html +++ b/dev/highlevels/plots_visualization/index.html @@ -1,2 +1,2 @@ -SciML-Supported Plotting and Visualization Libraries · Overview of Julia's SciML

SciML-Supported Plotting and Visualization Libraries

The following libraries are the plotting and visualization libraries which are supported and co-developed by the SciML developers. Other libraries may be used, though these are the libraries used in the tutorials and which have special hooks to ensure ergonomic usage with SciML tooling.

Plots.jl

Plots.jl is the current standard plotting system for the SciML ecosystem. SciML types attempt to include plot recipes for as many types as possible, allowing for automatic visualization with the Plots.jl system. All current tutorials and documentation default to using Plots.jl.

Makie.jl

Makie.jl is a high-performance interactive plotting system for the Julia programming language. It's planned to be the default plotting system used by the SciML organization in the near future.

+SciML-Supported Plotting and Visualization Libraries · Overview of Julia's SciML

SciML-Supported Plotting and Visualization Libraries

The following libraries are the plotting and visualization libraries which are supported and co-developed by the SciML developers. Other libraries may be used, though these are the libraries used in the tutorials and which have special hooks to ensure ergonomic usage with SciML tooling.

Plots.jl

Plots.jl is the current standard plotting system for the SciML ecosystem. SciML types attempt to include plot recipes for as many types as possible, allowing for automatic visualization with the Plots.jl system. All current tutorials and documentation default to using Plots.jl.

Makie.jl

Makie.jl is a high-performance interactive plotting system for the Julia programming language. It's planned to be the default plotting system used by the SciML organization in the near future.

diff --git a/dev/highlevels/symbolic_learning/index.html b/dev/highlevels/symbolic_learning/index.html index d2c72ef5a55..ae7b9ff042c 100644 --- a/dev/highlevels/symbolic_learning/index.html +++ b/dev/highlevels/symbolic_learning/index.html @@ -1,2 +1,2 @@ -Symbolic Learning and Artificial Intelligence · Overview of Julia's SciML

Symbolic Learning and Artificial Intelligence

Symbolic learning, the classical artificial intelligence, is a set of methods for learning symbolic equations from data and numerical functions. SciML offers an array of symbolic learning utilities which connect with the other machine learning and equation solver functionalities to make it easy to embed prior knowledge and discover missing physics. For more information, see Universal Differential Equations for Scientific Machine Learning.

DataDrivenDiffEq.jl: Data-Driven Modeling and Automated Discovery of Dynamical Systems

DataDrivenDiffEq.jl is a general interface for data-driven modeling, containing a large array of techniques such as:

  • Koopman operator methods (Dynamic-Mode Decomposition (DMD) and variations)
  • Sparse Identification of Dynamical Systems (SINDy and variations like iSINDy)
  • Sparse regression methods (STSLQ, SR3, etc.)
  • PDEFind
  • Wrappers for SymbolicRegression.jl
  • AI Feynman
  • OccamNet

SymbolicNumericIntegration.jl: Symbolic Integration via Numerical Methods

SymbolicNumericIntegration.jl is a package for computing the solution to symbolic integration problems using numerical methods (numerical integration mixed with sparse regression).

Third-Party Libraries to Note

SymbolicRegression.jl

SymbolicRegression.jl is a symbolic regression library which uses genetic algorithms with parallelization to achieve fast and robust symbolic learning.

+Symbolic Learning and Artificial Intelligence · Overview of Julia's SciML

Symbolic Learning and Artificial Intelligence

Symbolic learning, the classical artificial intelligence, is a set of methods for learning symbolic equations from data and numerical functions. SciML offers an array of symbolic learning utilities which connect with the other machine learning and equation solver functionalities to make it easy to embed prior knowledge and discover missing physics. For more information, see Universal Differential Equations for Scientific Machine Learning.

DataDrivenDiffEq.jl: Data-Driven Modeling and Automated Discovery of Dynamical Systems

DataDrivenDiffEq.jl is a general interface for data-driven modeling, containing a large array of techniques such as:

  • Koopman operator methods (Dynamic-Mode Decomposition (DMD) and variations)
  • Sparse Identification of Dynamical Systems (SINDy and variations like iSINDy)
  • Sparse regression methods (STSLQ, SR3, etc.)
  • PDEFind
  • Wrappers for SymbolicRegression.jl
  • AI Feynman
  • OccamNet

SymbolicNumericIntegration.jl: Symbolic Integration via Numerical Methods

SymbolicNumericIntegration.jl is a package for computing the solution to symbolic integration problems using numerical methods (numerical integration mixed with sparse regression).

Third-Party Libraries to Note

SymbolicRegression.jl

SymbolicRegression.jl is a symbolic regression library which uses genetic algorithms with parallelization to achieve fast and robust symbolic learning.

diff --git a/dev/highlevels/symbolic_tools/index.html b/dev/highlevels/symbolic_tools/index.html index ee92029d394..59b2072df21 100644 --- a/dev/highlevels/symbolic_tools/index.html +++ b/dev/highlevels/symbolic_tools/index.html @@ -1,2 +1,2 @@ -Symbolic Model Tooling and JuliaSymbolics · Overview of Julia's SciML

Symbolic Model Tooling and JuliaSymbolics

JuliaSymbolics is a sister organization of SciML. It spawned out of the symbolic modeling tools being developed within SciML (ModelingToolkit.jl) to become its own organization dedicated to building a fully-featured Julia-based Computer Algebra System (CAS). As such, the two organizations are closely aligned in terms of its developer community, and many of the SciML libraries use Symbolics.jl extensively.

ModelOrderReduction.jl: Automated Model Reduction for Fast Approximations of Solutions

ModelOrderReduction.jl is a package for automating the reduction of models. These methods form a submodel via a projection, where solving the smaller model provides approximation information about the full model. MOR.jl uses ModelingToolkit.jl as a system description and automatically transforms equations to the subform, defining the observables to automatically lazily reconstruct the full model on-demand in a fast and stable form.

Symbolics.jl: The Computer Algebra System (CAS) of the Julia Programming Language

Symbolics.jl is the CAS of the Julia programming language. If something needs to be done symbolically, most likely Symbolics.jl is the answer.

Metatheory.jl: E-Graphs to Automate Symbolic Transformations

Metatheory.jl is a library for defining e-graph rewriters for use on the common symbolic interface. This can be used to do all sorts of analysis and code transformations, such as improving code performance, numerical stability, and more. See Automated Code Optimization with E-Graphs for more details.

SymbolicUtils.jl: Define Your Own Computer Algebra System

SymbolicUtils.jl is the underlying utility library and rule-based rewriting language on which Symbolics.jl is developed. Symbolics.jl is standardized type and rule definitions built using SymbolicUtils.jl. However, if non-standard types are required, such as symbolic computing over Fock algebras, then SymbolicUtils.jl is the library from which the new symbolic types can be implemented.

+Symbolic Model Tooling and JuliaSymbolics · Overview of Julia's SciML

Symbolic Model Tooling and JuliaSymbolics

JuliaSymbolics is a sister organization of SciML. It spawned out of the symbolic modeling tools being developed within SciML (ModelingToolkit.jl) to become its own organization dedicated to building a fully-featured Julia-based Computer Algebra System (CAS). As such, the two organizations are closely aligned in terms of its developer community, and many of the SciML libraries use Symbolics.jl extensively.

ModelOrderReduction.jl: Automated Model Reduction for Fast Approximations of Solutions

ModelOrderReduction.jl is a package for automating the reduction of models. These methods form a submodel via a projection, where solving the smaller model provides approximation information about the full model. MOR.jl uses ModelingToolkit.jl as a system description and automatically transforms equations to the subform, defining the observables to automatically lazily reconstruct the full model on-demand in a fast and stable form.

Symbolics.jl: The Computer Algebra System (CAS) of the Julia Programming Language

Symbolics.jl is the CAS of the Julia programming language. If something needs to be done symbolically, most likely Symbolics.jl is the answer.

Metatheory.jl: E-Graphs to Automate Symbolic Transformations

Metatheory.jl is a library for defining e-graph rewriters for use on the common symbolic interface. This can be used to do all sorts of analysis and code transformations, such as improving code performance, numerical stability, and more. See Automated Code Optimization with E-Graphs for more details.

SymbolicUtils.jl: Define Your Own Computer Algebra System

SymbolicUtils.jl is the underlying utility library and rule-based rewriting language on which Symbolics.jl is developed. Symbolics.jl is standardized type and rule definitions built using SymbolicUtils.jl. However, if non-standard types are required, such as symbolic computing over Fock algebras, then SymbolicUtils.jl is the library from which the new symbolic types can be implemented.

diff --git a/dev/highlevels/uncertainty_quantification/index.html b/dev/highlevels/uncertainty_quantification/index.html index e6669d6afa2..38e1c22c3fc 100644 --- a/dev/highlevels/uncertainty_quantification/index.html +++ b/dev/highlevels/uncertainty_quantification/index.html @@ -1,2 +1,2 @@ -Uncertainty Quantification · Overview of Julia's SciML

Uncertainty Quantification

There's always uncertainty in our models. Whether it's in the form of the model's equations or in the model's parameters, the uncertainty in our simulation's output often needs to be quantified. The following tools automate this process.

For Measurements.jl vs MonteCarloMeasurements.jl vs Intervals.jl, and the relation to other methods, see the Uncertainty Programming chapter of the SciML Book.

PolyChaos.jl: Intrusive Polynomial Chaos Expansions Made Unintrusive

PolyChaos.jl is a library for calculating intrusive polynomial chaos expansions (PCE) on arbitrary Julia functions. This allows for inputting representations of probability distributions into functions to compute the output distribution in an expansion representation. While normally this would require deriving the PCE-expanded equations by hand, PolyChaos.jl does this at the compiler level using Julia's multiple dispatch, giving a high-performance implementation to a normally complex and tedious mathematical transformation.

SciMLExpectations.jl: Fast Calculations of Expectations of Equation Solutions

SciMLExpectations.jl is a library for accelerating the calculation of expectations of equation solutions with respect to input probability distributions, allowing for applications like robust optimization with respect to uncertainty. It uses Koopman operator techniques to calculate these expectations without requiring the propagation of uncertainties through a solver, effectively performing the adjoint of uncertainty quantification and being much more efficient in the process.

Third-Party Libraries to Note

Measurements.jl: Automated Linear Error Propagation

Measurements.jl is a library for automating linear error propagation. Uncertain numbers are defined as x = 3.8 ± 0.4 and are pushed through calculations using a normal distribution approximation in order to compute an approximate uncertain output. Measurements.jl uses a dictionary-based approach to keep track of correlations to improve the accuracy over naive implementations, though note that linear error propagation theory still has some major issues handling some types of equations, as described in detail in the MonteCarloMeasurements.jl documentation.

MonteCarloMeasurements.jl: Automated Monte Carlo Error Propagation

MonteCarloMeasurements.jl is a library for automating the uncertainty quantification of equation solutions using Monte Carlo methods. It defines number types which sample from an input distribution to receive a representative set of parameters that propagate through the solver to calculate a representative set of possible solutions. Note that Monte Carlo techniques can be expensive but are exact, in the sense that as the number of sample points increases to infinity it will compute a correct approximation of the output uncertainty.

ProbNumDiffEq.jl: Probabilistic Numerics Based Differential Equation Solvers

ProbNumDiffEq.jl is a set of probabilistic numerical ODE solvers which compute the solution of a differential equation along with a posterior distribution to estimate its numerical approximation error. Thus these specialized integrators compute an uncertainty output similar to the ProbInts technique of DiffEqUncertainty, but use specialized integration techniques in order to do it much faster for specific kinds of equations.

TaylorIntegration.jl: Taylor Series Integration for Rigorous Numerical Bounds

TaylorIntegration.jl is a library for Taylor series integrators, which has special functionality for computing the interval bound of possible solutions with respect to numerical approximation error.

IntervalArithmetic.jl: Rigorous Numerical Intervals

IntervalArithmetic.jl is a library for performing interval arithmetic calculations on arbitrary Julia code. Interval arithmetic computes rigorous computations with respect to finite-precision floating-point arithmetic, i.e. its intervals are guaranteed to include the true solution. However, interval arithmetic intervals can grow at exponential rates in many problems, thus being unsuitable for analyses in many equation solver contexts.

+Uncertainty Quantification · Overview of Julia's SciML

Uncertainty Quantification

There's always uncertainty in our models. Whether it's in the form of the model's equations or in the model's parameters, the uncertainty in our simulation's output often needs to be quantified. The following tools automate this process.

For Measurements.jl vs MonteCarloMeasurements.jl vs Intervals.jl, and the relation to other methods, see the Uncertainty Programming chapter of the SciML Book.

PolyChaos.jl: Intrusive Polynomial Chaos Expansions Made Unintrusive

PolyChaos.jl is a library for calculating intrusive polynomial chaos expansions (PCE) on arbitrary Julia functions. This allows for inputting representations of probability distributions into functions to compute the output distribution in an expansion representation. While normally this would require deriving the PCE-expanded equations by hand, PolyChaos.jl does this at the compiler level using Julia's multiple dispatch, giving a high-performance implementation to a normally complex and tedious mathematical transformation.

SciMLExpectations.jl: Fast Calculations of Expectations of Equation Solutions

SciMLExpectations.jl is a library for accelerating the calculation of expectations of equation solutions with respect to input probability distributions, allowing for applications like robust optimization with respect to uncertainty. It uses Koopman operator techniques to calculate these expectations without requiring the propagation of uncertainties through a solver, effectively performing the adjoint of uncertainty quantification and being much more efficient in the process.

Third-Party Libraries to Note

Measurements.jl: Automated Linear Error Propagation

Measurements.jl is a library for automating linear error propagation. Uncertain numbers are defined as x = 3.8 ± 0.4 and are pushed through calculations using a normal distribution approximation in order to compute an approximate uncertain output. Measurements.jl uses a dictionary-based approach to keep track of correlations to improve the accuracy over naive implementations, though note that linear error propagation theory still has some major issues handling some types of equations, as described in detail in the MonteCarloMeasurements.jl documentation.

MonteCarloMeasurements.jl: Automated Monte Carlo Error Propagation

MonteCarloMeasurements.jl is a library for automating the uncertainty quantification of equation solutions using Monte Carlo methods. It defines number types which sample from an input distribution to receive a representative set of parameters that propagate through the solver to calculate a representative set of possible solutions. Note that Monte Carlo techniques can be expensive but are exact, in the sense that as the number of sample points increases to infinity it will compute a correct approximation of the output uncertainty.

ProbNumDiffEq.jl: Probabilistic Numerics Based Differential Equation Solvers

ProbNumDiffEq.jl is a set of probabilistic numerical ODE solvers which compute the solution of a differential equation along with a posterior distribution to estimate its numerical approximation error. Thus these specialized integrators compute an uncertainty output similar to the ProbInts technique of DiffEqUncertainty, but use specialized integration techniques in order to do it much faster for specific kinds of equations.

TaylorIntegration.jl: Taylor Series Integration for Rigorous Numerical Bounds

TaylorIntegration.jl is a library for Taylor series integrators, which has special functionality for computing the interval bound of possible solutions with respect to numerical approximation error.

IntervalArithmetic.jl: Rigorous Numerical Intervals

IntervalArithmetic.jl is a library for performing interval arithmetic calculations on arbitrary Julia code. Interval arithmetic computes rigorous computations with respect to finite-precision floating-point arithmetic, i.e. its intervals are guaranteed to include the true solution. However, interval arithmetic intervals can grow at exponential rates in many problems, thus being unsuitable for analyses in many equation solver contexts.

diff --git a/dev/index.html b/dev/index.html index 30c140b918e..c1a0115b148 100644 --- a/dev/index.html +++ b/dev/index.html @@ -1,5 +1,5 @@ -SciML: Open Source Software for Scientific Machine Learning with Julia · Overview of Julia's SciML

SciML: Differentiable Modeling and Simulation Combined with Machine Learning

The SciML organization is a collection of tools for solving equations and modeling systems developed in the Julia programming language with bindings to other languages such as R and Python. The organization provides well-maintained tools which compose together as a coherent ecosystem. It has a coherent development principle, unified APIs over large collections of equation solvers, pervasive differentiability and sensitivity analysis, and features many of the highest performance and parallel implementations one can find.

Scientific Machine Learning (SciML) = Scientific Computing + Machine Learning

Where to Start?

And for diving into the details, use the bar on the top to navigate to the submodule of interest!

Reproducibility

The documentation of the [SciML Showcase](@ref showcase) was built using these direct dependencies,
Status `/var/lib/buildkite-agent/builds/gpuci-1/julialang/scimldocs/docs/Project.toml`
+SciML: Open Source Software for Scientific Machine Learning with Julia · Overview of Julia's SciML

SciML: Differentiable Modeling and Simulation Combined with Machine Learning

The SciML organization is a collection of tools for solving equations and modeling systems developed in the Julia programming language with bindings to other languages such as R and Python. The organization provides well-maintained tools which compose together as a coherent ecosystem. It has a coherent development principle, unified APIs over large collections of equation solvers, pervasive differentiability and sensitivity analysis, and features many of the highest performance and parallel implementations one can find.

Scientific Machine Learning (SciML) = Scientific Computing + Machine Learning

Where to Start?

And for diving into the details, use the bar on the top to navigate to the submodule of interest!

Reproducibility

The documentation of the [SciML Showcase](@ref showcase) was built using these direct dependencies,
Status `/var/lib/buildkite-agent/builds/gpuci-12/julialang/scimldocs/docs/Project.toml`
   [0bf59076] AdvancedHMC v0.5.5
   [6e4b80f9] BenchmarkTools v1.3.2
   [336ed68f] CSV v0.10.11
@@ -37,7 +37,7 @@
   [1dea7af3] OrdinaryDiffEq v6.58.0
   [91a5bcdd] Plots v1.39.0
   [afe9f18d] SciMLExpectations v2.1.0
-  [1ed8b502] SciMLSensitivity v7.44.0
+  [1ed8b502] SciMLSensitivity v7.45.0
   [860ef19b] StableRNGs v1.0.0
   [90137ffa] StaticArrays v1.6.5
   [f3b207a7] StatsPlots v0.15.6
@@ -66,7 +66,7 @@
   JULIA_DEPOT_PATH = /root/.cache/julia-buildkite-plugin/depots/0183cc98-c3b4-4959-aaaa-6c0d5f351407
   LD_LIBRARY_PATH = /usr/local/nvidia/lib:/usr/local/nvidia/lib64
   JULIA_PKG_SERVER =
-  JULIA_IMAGE_THREADS = 1
A more complete overview of all dependencies and their versions is also provided.
Status `/var/lib/buildkite-agent/builds/gpuci-1/julialang/scimldocs/docs/Manifest.toml`
+  JULIA_IMAGE_THREADS = 1
A more complete overview of all dependencies and their versions is also provided.
Status `/var/lib/buildkite-agent/builds/gpuci-12/julialang/scimldocs/docs/Manifest.toml`
   [47edcb42] ADTypes v0.2.4
   [a4c015fc] ANSIColoredPrinters v0.0.1
  [c3fe647b] AbstractAlgebra v0.32.5
@@ -346,7 +346,7 @@
   [afe9f18d] SciMLExpectations v2.1.0
   [e9a6253c] SciMLNLSolve v0.1.9
   [c0aeaf25] SciMLOperators v0.3.6
-  [1ed8b502] SciMLSensitivity v7.44.0
+  [1ed8b502] SciMLSensitivity v7.45.0
   [30f210dd] ScientificTypesBase v3.0.0
   [6c6a2e73] Scratch v1.2.0
   [91c51154] SentinelArrays v1.4.0
@@ -388,7 +388,7 @@
   [0c5d862f] Symbolics v5.10.0
   [ab02a1b2] TableOperations v1.2.0
   [3783bdb8] TableTraits v1.0.1
-  [bd369af6] Tables v1.11.0
+  [bd369af6] Tables v1.11.1
   [62fd8b95] TensorCore v0.1.1
   [8ea1fca8] TermInterface v0.3.3
   [5d786b92] TerminalLoggers v0.1.7
@@ -565,4 +565,4 @@
   [8e850b90] libblastrampoline_jll v5.8.0+0
   [8e850ede] nghttp2_jll v1.48.0+0
   [3f19e933] p7zip_jll v17.4.0+0
-Info Packages marked with  and  have new versions available, but those with  are restricted by compatibility constraints from upgrading. To see why use `status --outdated -m`

You can also download the manifest file and the project file.

+Info Packages marked with ⌃ and ⌅ have new versions available, but those with ⌅ are restricted by compatibility constraints from upgrading. To see why use `status --outdated -m`

You can also download the manifest file and the project file.

diff --git a/dev/overview/index.html b/dev/overview/index.html index 0a60b0d0f1b..db0daaec447 100644 --- a/dev/overview/index.html +++ b/dev/overview/index.html @@ -1,2 +1,2 @@ -Detailed Overview of the SciML Software Ecosystem · Overview of Julia's SciML

Detailed Overview of the SciML Software Ecosystem

SciML: Combining High-Performance Scientific Computing and Machine Learning

SciML is not standard machine learning, SciML is the combination of scientific computing techniques with machine learning. Thus the SciML organization is not an organization for machine learning libraries (see FluxML for machine learning in Julia), rather SciML is an organization dedicated to the development of scientific computing tools which work seamlessly in conjunction with next-generation machine learning workflows. This includes:

  • High-performance and accurate tools for standard scientific computing modeling and simulation
  • Compatibility with differentiable programming and automatic differentiation
  • Tools for building complex multiscale models
  • Methods for handling inverse problems, model calibration, controls, and Bayesian analysis
  • Symbolic modeling tools for generating efficient code for numerical equation solvers
  • Methods for automatic discovery of (bio)physical equations

and much more. For an overview of the broad goals of the SciML organization, watch:

Overview of Computational Science in Julia with SciML

Below is a simplification of the user-facing packages for use in scientific computing and SciML workflows.

Workflow ElementSciML-Supported Julia packages
Plotting and VisualizationPlots*, Makie*
Sparse matrixSparseArrays*
Interpolation/approximationDataInterpolations*, ApproxFun*
Linear system / least squaresLinearSolve
Nonlinear system / rootfindingNonlinearSolve
Polynomial rootsPolynomials*
IntegrationIntegrals
Nonlinear OptimizationOptimization
Other Optimization (linear, quadratic, convex, etc.)JuMP*
Initial-value problemDifferentialEquations
Boundary-value problemDifferentialEquations
Continuous-Time Markov Chains (Poisson Jumps), Jump DiffusionsJumpProcesses
Finite differencesFiniteDifferences*, FiniteDiff*
Automatic DifferentiationForwardDiff*, Enzyme*, DiffEqSensitivity
Bayesian ModelingTuring*
Deep LearningFlux*
Acausal Modeling / DAEsModelingToolkit
Chemical Reaction NetworksCatalyst
Symbolic ComputingSymbolics
Fast Fourier TransformFFTW*
Partial Differential Equation DiscretizationsAssociated Julia packages
–-–-
Finite DifferencesMethodOfLines
Discontinuous GalerkinTrixi*
Finite ElementGridap*
Physics-Informed Neural NetworksNeuralPDE
Neural OperatorsNeuralOperators
High Dimensional Deep LearningHighDimPDE

* Denotes a non-SciML package that is heavily tested against as part of SciML workflows and has frequent collaboration with the SciML developers.

SciML Mind Map

Domains of SciML

The SciML common interface covers the following domains:

  • Linear systems (LinearProblem)

    • Direct methods for dense and sparse
    • Iterative solvers with preconditioning
  • Nonlinear Systems (NonlinearProblem)

    • Systems of nonlinear equations
    • Scalar bracketing systems
  • Integrals (quadrature) (IntegralProblem)

  • Differential Equations

    • Discrete equations (function maps, discrete stochastic (Gillespie/Markov) simulations) (DiscreteProblem and JumpProblem)
    • Ordinary differential equations (ODEs) (ODEProblem)
    • Split and Partitioned ODEs (Symplectic integrators, IMEX Methods) (SplitODEProblem)
    • Stochastic ordinary differential equations (SODEs or SDEs) (SDEProblem)
    • Stochastic differential-algebraic equations (SDAEs) (SDEProblem with mass matrices)
    • Random differential equations (RODEs or RDEs) (RODEProblem)
    • Differential algebraic equations (DAEs) (DAEProblem and ODEProblem with mass matrices)
    • Delay differential equations (DDEs) (DDEProblem)
    • Neutral, retarded, and algebraic delay differential equations (NDDEs, RDDEs, and DDAEs)
    • Stochastic delay differential equations (SDDEs) (SDDEProblem)
    • Experimental support for stochastic neutral, retarded, and algebraic delay differential equations (SNDDEs, SRDDEs, and SDDAEs)
    • Mixed discrete and continuous equations (Hybrid Equations, Jump Diffusions) (DEProblems with callbacks and JumpProblem)
  • Optimization (OptimizationProblem)

    • Nonlinear (constrained) optimization
  • (Stochastic/Delay/Differential-Algebraic) Partial Differential Equations (PDESystem)

    • Finite difference and finite volume methods
    • Interfaces to finite element methods
    • Physics-Informed Neural Networks (PINNs)
    • Integro-Differential Equations
    • Fractional Differential Equations
  • Specialized Forms

    • Partial Integro-Differential Equations (PIPDEProblem)
  • Data-driven modeling

    • Discrete-time data-driven dynamical systems (DiscreteDataDrivenProblem)
    • Continuous-time data-driven dynamical systems (ContinuousDataDrivenProblem)
    • Symbolic regression (DirectDataDrivenProblem)
  • Uncertainty quantification and expected values (ExpectationProblem)

The SciML common interface also includes ModelingToolkit.jl for defining such systems symbolically, allowing for optimizations like automated generation of parallel code, symbolic simplification, and generation of sparsity patterns.

Inverse Problems, Parameter Estimation, and Structural Identification

Parameter estimation and inverse problems are solved directly on their constituent problem types using tools like SciMLSensitivity.jl. Thus for example, there is no ODEInverseProblem, and instead ODEProblem is used to find the parameters p that solve the inverse problem. Check out the SciMLSensitivity documentation for a discussion on connections to automatic differentiation, optimization, and adjoints.

Common Interface High-Level Overview

The SciML interface is common as the usage of arguments is standardized across all of the problem domains. Underlying high-level ideas include:

  • All domains use the same interface of defining an AbstractSciMLProblem which is then solved via solve(prob,alg;kwargs), where alg is an AbstractSciMLAlgorithm. The keyword argument namings are standardized across the organization.
  • AbstractSciMLProblems are generally defined by an AbstractSciMLFunction which can define extra details about a model function, such as its analytical Jacobian, its sparsity patterns and so on.
  • There is an organization-wide method for defining linear and nonlinear solvers used within other solvers, giving maximum control of performance to the user.
  • Types used within the packages are defined by the input types. For example, packages attempt to internally use the type of the initial condition as the type for the state within differential equation solvers.
  • solve calls should be thread-safe and parallel-safe.
  • init(prob,alg;kwargs) returns an iterator which allows for directly iterating over the solution process
  • High performance is key. Any performance that is not at the top level is considered a bug and should be reported as such.
  • All functions have an in-place and out-of-place form, where the in-place form is made to utilize mutation for high performance on large-scale problems and the out-of-place form is for compatibility with tooling like static arrays and some reverse-mode automatic differentiation systems.

Flowchart Example for PDE-Constrained Optimal Control

The following example showcases how the pieces of the common interface connect to solve a problem that mixes inference, symbolics, and numerics.

External Binding Libraries

  • diffeqr

    • Solving differential equations in R using DifferentialEquations.jl with ModelingToolkit for JIT compilation and GPU-acceleration
  • diffeqpy

    • Solving differential equations in Python using DifferentialEquations.jl

Note About Third-Party Libraries

The SciML documentation references and recommends many third-party libraries for improving one's modeling, simulation, and analysis workflow in Julia. Take these as a positive affirmation of the quality of these libraries, as these libraries are commonly tested by SciML developers who are in contact with the development teams of these groups. It also documents the libraries which are commonly chosen by SciML as dependencies. Do not take omissions as negative affirmations against a given library, i.e. a library left off of the list by SciML is not a negative endorsement. Rather, it means that compatibility with SciML is untested, SciML developers may have a personal preference for another choice, or SciML developers may be simply unaware of the library's existence. If one would like to add a third-party library to the SciML documentation, open a pull request with the requested text.

Note that the libraries in this documentation are only those that are meant to be used in the SciML extended universe of modeling, simulation, and analysis and thus there are many high-quality libraries in other domains (machine learning, data science, etc.) which are purposefully not included. For an overview of the Julia package ecosystem, see the JuliaHub Search Engine.

+Detailed Overview of the SciML Software Ecosystem · Overview of Julia's SciML

Detailed Overview of the SciML Software Ecosystem

SciML: Combining High-Performance Scientific Computing and Machine Learning

SciML is not standard machine learning, SciML is the combination of scientific computing techniques with machine learning. Thus the SciML organization is not an organization for machine learning libraries (see FluxML for machine learning in Julia), rather SciML is an organization dedicated to the development of scientific computing tools which work seamlessly in conjunction with next-generation machine learning workflows. This includes:

  • High-performance and accurate tools for standard scientific computing modeling and simulation
  • Compatibility with differentiable programming and automatic differentiation
  • Tools for building complex multiscale models
  • Methods for handling inverse problems, model calibration, controls, and Bayesian analysis
  • Symbolic modeling tools for generating efficient code for numerical equation solvers
  • Methods for automatic discovery of (bio)physical equations

and much more. For an overview of the broad goals of the SciML organization, watch:

Overview of Computational Science in Julia with SciML

Below is a simplification of the user-facing packages for use in scientific computing and SciML workflows.

Workflow ElementSciML-Supported Julia packages
Plotting and VisualizationPlots*, Makie*
Sparse matrixSparseArrays*
Interpolation/approximationDataInterpolations*, ApproxFun*
Linear system / least squaresLinearSolve
Nonlinear system / rootfindingNonlinearSolve
Polynomial rootsPolynomials*
IntegrationIntegrals
Nonlinear OptimizationOptimization
Other Optimization (linear, quadratic, convex, etc.)JuMP*
Initial-value problemDifferentialEquations
Boundary-value problemDifferentialEquations
Continuous-Time Markov Chains (Poisson Jumps), Jump DiffusionsJumpProcesses
Finite differencesFiniteDifferences*, FiniteDiff*
Automatic DifferentiationForwardDiff*, Enzyme*, DiffEqSensitivity
Bayesian ModelingTuring*
Deep LearningFlux*
Acausal Modeling / DAEsModelingToolkit
Chemical Reaction NetworksCatalyst
Symbolic ComputingSymbolics
Fast Fourier TransformFFTW*
Partial Differential Equation DiscretizationsAssociated Julia packages
–-–-
Finite DifferencesMethodOfLines
Discontinuous GalerkinTrixi*
Finite ElementGridap*
Physics-Informed Neural NetworksNeuralPDE
Neural OperatorsNeuralOperators
High Dimensional Deep LearningHighDimPDE

* Denotes a non-SciML package that is heavily tested against as part of SciML workflows and has frequent collaboration with the SciML developers.

SciML Mind Map

Domains of SciML

The SciML common interface covers the following domains:

  • Linear systems (LinearProblem)

    • Direct methods for dense and sparse
    • Iterative solvers with preconditioning
  • Nonlinear Systems (NonlinearProblem)

    • Systems of nonlinear equations
    • Scalar bracketing systems
  • Integrals (quadrature) (IntegralProblem)

  • Differential Equations

    • Discrete equations (function maps, discrete stochastic (Gillespie/Markov) simulations) (DiscreteProblem and JumpProblem)
    • Ordinary differential equations (ODEs) (ODEProblem)
    • Split and Partitioned ODEs (Symplectic integrators, IMEX Methods) (SplitODEProblem)
    • Stochastic ordinary differential equations (SODEs or SDEs) (SDEProblem)
    • Stochastic differential-algebraic equations (SDAEs) (SDEProblem with mass matrices)
    • Random differential equations (RODEs or RDEs) (RODEProblem)
    • Differential algebraic equations (DAEs) (DAEProblem and ODEProblem with mass matrices)
    • Delay differential equations (DDEs) (DDEProblem)
    • Neutral, retarded, and algebraic delay differential equations (NDDEs, RDDEs, and DDAEs)
    • Stochastic delay differential equations (SDDEs) (SDDEProblem)
    • Experimental support for stochastic neutral, retarded, and algebraic delay differential equations (SNDDEs, SRDDEs, and SDDAEs)
    • Mixed discrete and continuous equations (Hybrid Equations, Jump Diffusions) (DEProblems with callbacks and JumpProblem)
  • Optimization (OptimizationProblem)

    • Nonlinear (constrained) optimization
  • (Stochastic/Delay/Differential-Algebraic) Partial Differential Equations (PDESystem)

    • Finite difference and finite volume methods
    • Interfaces to finite element methods
    • Physics-Informed Neural Networks (PINNs)
    • Integro-Differential Equations
    • Fractional Differential Equations
  • Specialized Forms

    • Partial Integro-Differential Equations (PIPDEProblem)
  • Data-driven modeling

    • Discrete-time data-driven dynamical systems (DiscreteDataDrivenProblem)
    • Continuous-time data-driven dynamical systems (ContinuousDataDrivenProblem)
    • Symbolic regression (DirectDataDrivenProblem)
  • Uncertainty quantification and expected values (ExpectationProblem)

The SciML common interface also includes ModelingToolkit.jl for defining such systems symbolically, allowing for optimizations like automated generation of parallel code, symbolic simplification, and generation of sparsity patterns.

Inverse Problems, Parameter Estimation, and Structural Identification

Parameter estimation and inverse problems are solved directly on their constituent problem types using tools like SciMLSensitivity.jl. Thus for example, there is no ODEInverseProblem, and instead ODEProblem is used to find the parameters p that solve the inverse problem. Check out the SciMLSensitivity documentation for a discussion on connections to automatic differentiation, optimization, and adjoints.

Common Interface High-Level Overview

The SciML interface is common as the usage of arguments is standardized across all of the problem domains. Underlying high-level ideas include:

  • All domains use the same interface of defining an AbstractSciMLProblem which is then solved via solve(prob,alg;kwargs), where alg is an AbstractSciMLAlgorithm. The keyword argument namings are standardized across the organization.
  • AbstractSciMLProblems are generally defined by an AbstractSciMLFunction which can define extra details about a model function, such as its analytical Jacobian, its sparsity patterns and so on.
  • There is an organization-wide method for defining linear and nonlinear solvers used within other solvers, giving maximum control of performance to the user.
  • Types used within the packages are defined by the input types. For example, packages attempt to internally use the type of the initial condition as the type for the state within differential equation solvers.
  • solve calls should be thread-safe and parallel-safe.
  • init(prob,alg;kwargs) returns an iterator which allows for directly iterating over the solution process
  • High performance is key. Any performance that is not at the top level is considered a bug and should be reported as such.
  • All functions have an in-place and out-of-place form, where the in-place form is made to utilize mutation for high performance on large-scale problems and the out-of-place form is for compatibility with tooling like static arrays and some reverse-mode automatic differentiation systems.

Flowchart Example for PDE-Constrained Optimal Control

The following example showcases how the pieces of the common interface connect to solve a problem that mixes inference, symbolics, and numerics.

External Binding Libraries

  • diffeqr

    • Solving differential equations in R using DifferentialEquations.jl with ModelingToolkit for JIT compilation and GPU-acceleration
  • diffeqpy

    • Solving differential equations in Python using DifferentialEquations.jl

Note About Third-Party Libraries

The SciML documentation references and recommends many third-party libraries for improving one's modeling, simulation, and analysis workflow in Julia. Take these as a positive affirmation of the quality of these libraries, as these libraries are commonly tested by SciML developers who are in contact with the development teams of these groups. It also documents the libraries which are commonly chosen by SciML as dependencies. Do not take omissions as negative affirmations against a given library, i.e. a library left off of the list by SciML is not a negative endorsement. Rather, it means that compatibility with SciML is untested, SciML developers may have a personal preference for another choice, or SciML developers may be simply unaware of the library's existence. If one would like to add a third-party library to the SciML documentation, open a pull request with the requested text.

Note that the libraries in this documentation are only those that are meant to be used in the SciML extended universe of modeling, simulation, and analysis and thus there are many high-quality libraries in other domains (machine learning, data science, etc.) which are purposefully not included. For an overview of the Julia package ecosystem, see the JuliaHub Search Engine.

diff --git a/dev/showcase/bayesian_neural_ode/3a8ed773.svg b/dev/showcase/bayesian_neural_ode/3a8ed773.svg deleted file mode 100644 index 24a7a2e81a7..00000000000 --- a/dev/showcase/bayesian_neural_ode/3a8ed773.svg +++ /dev/null @@ -1,731 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev/showcase/bayesian_neural_ode/45fa1250.svg b/dev/showcase/bayesian_neural_ode/45fa1250.svg deleted file mode 100644 index 309607417e0..00000000000 --- a/dev/showcase/bayesian_neural_ode/45fa1250.svg +++ /dev/null @@ -1,314 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev/showcase/bayesian_neural_ode/66ae42fa.svg b/dev/showcase/bayesian_neural_ode/66ae42fa.svg new file mode 100644 index 00000000000..aab2493127f --- /dev/null +++ b/dev/showcase/bayesian_neural_ode/66ae42fa.svg @@ -0,0 +1,389 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/bayesian_neural_ode/87926623.svg b/dev/showcase/bayesian_neural_ode/87926623.svg new file mode 100644 index 00000000000..66b79c77a74 --- /dev/null +++ b/dev/showcase/bayesian_neural_ode/87926623.svg @@ -0,0 +1,320 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/bayesian_neural_ode/ba037494.svg b/dev/showcase/bayesian_neural_ode/ba037494.svg deleted file mode 100644 index 6aace798061..00000000000 --- a/dev/showcase/bayesian_neural_ode/ba037494.svg +++ /dev/null @@ -1,389 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev/showcase/bayesian_neural_ode/d00c425e.svg b/dev/showcase/bayesian_neural_ode/d00c425e.svg new file mode 100644 index 00000000000..050b0503bf8 --- /dev/null +++ b/dev/showcase/bayesian_neural_ode/d00c425e.svg @@ -0,0 +1,731 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/bayesian_neural_ode/e7f7768c.svg b/dev/showcase/bayesian_neural_ode/e7f7768c.svg deleted file mode 100644 index 7499a3b3ed7..00000000000 --- a/dev/showcase/bayesian_neural_ode/e7f7768c.svg +++ /dev/null @@ -1,169 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev/showcase/bayesian_neural_ode/f8057e8b.svg b/dev/showcase/bayesian_neural_ode/f8057e8b.svg new file mode 100644 index 00000000000..b14c9c26f74 --- /dev/null +++ 
b/dev/showcase/bayesian_neural_ode/f8057e8b.svg @@ -0,0 +1,169 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/bayesian_neural_ode/index.html b/dev/showcase/bayesian_neural_ode/index.html index b7500da4c24..acef9bb3e46 100644 --- a/dev/showcase/bayesian_neural_ode/index.html +++ b/dev/showcase/bayesian_neural_ode/index.html @@ -20,24 +20,24 @@ prob_neuralode = NeuralODE(dudt2, tspan, Tsit5(), saveat = tsteps) rng = Random.default_rng() p = Float64.(prob_neuralode.p)
252-element Vector{Float64}:
- -0.017064884305000305
-  0.06599725037813187
-  0.13436728715896606
-  0.01417487021535635
-  0.18168458342552185
- -0.2803120017051697
-  0.25211381912231445
-  0.26262709498405457
- -0.12438465654850006
- -0.3154034912586212
+ -0.33000418543815613
+  0.18256615102291107
+  0.2278011590242386
+  0.06275352835655212
+ -0.29760780930519104
+  0.12547996640205383
+  0.06975223869085312
+  0.21861077845096588
+ -0.1238202229142189
+ -0.1958921253681183
   ⋮
- -0.012600569054484367
-  0.10846768319606781
- -0.04696580395102501
-  0.08232373744249344
-  0.13289421796798706
-  0.19088612496852875
- -0.20355525612831116
+ -0.26335686445236206
+  0.19309598207473755
+ -0.007345013786107302
+  0.04693717509508133
+ -0.19524787366390228
+ -0.044687483459711075
+  0.1797753870487213
   0.0
   0.0

Note that the f64 is required to put the Flux neural network into Float64 precision.

Step 3: Define the loss function for the Neural ODE.

function predict_neuralode(p)
     Array(prob_neuralode(u0, p))
@@ -57,11 +57,11 @@
 h = Hamiltonian(metric, l, dldθ)
Hamiltonian(metric=DiagEuclideanMetric([1.0, 1.0, 1.0, 1.0, 1.0, 1 ...]), kinetic=AdvancedHMC.GaussianKinetic())

We use the NUTS sampler with an acceptance ratio of δ= 0.45 in this example. In addition, we use Nesterov Dual Averaging for the Step Size adaptation.

We sample using 500 warmup samples and 500 posterior samples.

integrator = Leapfrog(find_good_stepsize(h, p))
 kernel = HMCKernel(Trajectory{MultinomialTS}(integrator, GeneralisedNoUTurn()))
 adaptor = StanHMCAdaptor(MassMatrixAdaptor(metric), StepSizeAdaptor(0.45, integrator))
-samples, stats = sample(h, kernel, p, 500, adaptor, 500; progress = true)
([[-0.0365001103705493, 0.07662583789738267, 0.10693645264362381, -0.006826415099589554, 0.15761375841631212, -0.2804456539727617, 0.2518347221802681, 0.27045381460130574, -0.15061491956411044, -0.3465536777531528  …  -0.24762138875810474, -0.03160795137505377, 0.18497575441112835, -0.006970298153234612, 0.05210029790050283, 0.16172357851267, 0.16278859212473046, -0.21283546106906664, -0.07300956550290734, -0.0191014127646561], [-0.0365001103705493, 0.07662583789738267, 0.10693645264362381, -0.006826415099589554, 0.15761375841631212, -0.2804456539727617, 0.2518347221802681, 0.27045381460130574, -0.15061491956411044, -0.3465536777531528  …  -0.24762138875810474, -0.03160795137505377, 0.18497575441112835, -0.006970298153234612, 0.05210029790050283, 0.16172357851267, 0.16278859212473046, -0.21283546106906664, -0.07300956550290734, -0.0191014127646561], [-0.0365001103705493, 0.07662583789738267, 0.10693645264362381, -0.006826415099589554, 0.15761375841631212, -0.2804456539727617, 0.2518347221802681, 0.27045381460130574, -0.15061491956411044, -0.3465536777531528  …  -0.24762138875810474, -0.03160795137505377, 0.18497575441112835, -0.006970298153234612, 0.05210029790050283, 0.16172357851267, 0.16278859212473046, -0.21283546106906664, -0.07300956550290734, -0.0191014127646561], [0.029446717395081437, 0.5476788322431175, 0.23481048530621892, -0.19762513069098187, 0.6093678878681814, -0.38807520037902005, -0.149849898488166, 0.1328877985582313, -0.6180116605525925, -0.5356703259709815  …  -0.5919539409328056, -0.2570008393950996, 0.4178928665299082, 0.3236255483599075, -0.08961782352879413, 0.1821868469654969, -0.16189924590371071, -0.5659509327517164, -0.8999463424430814, -0.2723278310088356], [0.029446717395081437, 0.5476788322431175, 0.23481048530621892, -0.19762513069098187, 0.6093678878681814, -0.38807520037902005, -0.149849898488166, 0.1328877985582313, -0.6180116605525925, -0.5356703259709815  …  -0.5919539409328056, -0.2570008393950996, 0.4178928665299082, 
0.3236255483599075, -0.08961782352879413, 0.1821868469654969, -0.16189924590371071, -0.5659509327517164, -0.8999463424430814, -0.2723278310088356], [-0.3998267125696636, 0.1684650000250018, 0.5965218782262774, -0.23274304357790626, 0.9753103083723947, -0.2515313916979882, -0.033385801483908906, -0.18401299918012085, -1.0363940731966157, -0.32857285609544945  …  -0.3705834402254309, 0.19985489272165424, 0.16221039849262972, 0.2824806945600669, -0.23922372166033734, 0.08759380938249411, 0.35663548941623513, 0.1928209052945163, -0.992654155485629, -0.29589578875348227], [-0.3998267125696636, 0.1684650000250018, 0.5965218782262774, -0.23274304357790626, 0.9753103083723947, -0.2515313916979882, -0.033385801483908906, -0.18401299918012085, -1.0363940731966157, -0.32857285609544945  …  -0.3705834402254309, 0.19985489272165424, 0.16221039849262972, 0.2824806945600669, -0.23922372166033734, 0.08759380938249411, 0.35663548941623513, 0.1928209052945163, -0.992654155485629, -0.29589578875348227], [-0.9003964953676126, 0.5723975848134228, 0.1385125231639766, -0.08171479194799164, 0.4256693487828702, -0.257866666892718, 0.5624380429278243, -0.5170484733891597, -0.7356397200617091, -0.2961081963892799  …  -0.3059851537908679, -0.14851840805990216, 0.046890288771885016, 0.3740493922695093, -0.5366678423413757, -0.2453017133815511, 0.6537852486369788, 0.17383628282391686, -1.1074352848995899, 0.029863481625184173], [1.0999897228913074, -0.6215852185125176, 0.15420060718148115, -0.7660211025067891, 0.21507381259815211, -0.5532111891230447, 0.018810456829994916, -0.11898670067429008, 0.7384169629726743, -0.08805730126225239  …  -0.23290305293385652, 0.7402887649886534, 0.17292875196665664, -0.04074434131417873, -0.6049331451725662, 0.10390813968108242, -1.0336816927553296, 0.3819274616782622, -0.45040296956672415, 0.43798369729906095], [1.064440364531824, -0.7982518914562085, 0.1236651385214875, -0.6508011566883268, 0.27393536171856964, -0.5446567645129938, 0.026934180193179412, 
-0.12959830873915837, 0.6786594927615918, -0.0519061165926059  …  -0.05213199141929464, 0.660124875793197, 0.21641626623247925, -0.06622654658910902, -0.5887732770202142, 0.06833725820760292, -1.0518911007359153, 0.4905388842912784, -0.5523076479797976, 0.4737290613881181]  …  [-1.1142411380261805, -0.7249512800768988, 0.6100348209846012, -0.40490182137352004, 0.605406413693122, 0.0372265442343984, 1.168901141937154, 0.2818526467884747, 0.4919685822801002, -1.559010245242933  …  -0.5108356312392858, 0.03091515010115376, 0.11944319451062131, -0.017262913039201064, -0.8437413256532023, 0.6726449591458137, -0.8064176250437681, -1.0246590527926245, -0.7486775523646163, -0.06553661537385033], [-1.5293409812933008, -0.3010586788941248, 0.6837701734029695, -0.5956119418824128, 0.6675379098220396, 0.854613136321031, 1.5306604319128703, 0.6101724342374177, 0.01118305807228976, -1.6174495592424443  …  -0.17227289232108978, -0.005996365046730918, 0.4031073094413611, -0.5789838467638541, -0.5281486631395474, 0.2117490677066908, -0.8204623785472371, -1.3158579530219496, -0.5483562004231934, -0.28034945503737385], [-1.5735618743739104, 0.2639712904661795, 1.0646408187013363, -0.7534499043321278, 0.3610820104846976, 0.4326499036818794, 1.2992564044127946, 0.1756740220347013, -0.890794913617, -0.29925124818363547  …  -1.0176744113625018, 0.9648512998572103, 0.34193095529052075, -0.23236144579011325, 0.8555811630598902, 0.4550669382663721, -0.042394350335922815, -0.7114477783270654, -0.19519028259910867, -0.44700028178039597], [-1.5735618743739104, 0.2639712904661795, 1.0646408187013363, -0.7534499043321278, 0.3610820104846976, 0.4326499036818794, 1.2992564044127946, 0.1756740220347013, -0.890794913617, -0.29925124818363547  …  -1.0176744113625018, 0.9648512998572103, 0.34193095529052075, -0.23236144579011325, 0.8555811630598902, 0.4550669382663721, -0.042394350335922815, -0.7114477783270654, -0.19519028259910867, -0.44700028178039597], [-0.14789381946806407, 0.9753853564912301, 
1.134149577398367, -0.020847062741065094, 0.6387374859554742, -0.8865221257296082, -0.2560166634186708, 0.13172286137172104, -0.03938425262312624, -0.9252374069237924  …  -1.0416277936698495, -0.06617521384630183, -0.17629699910654692, -1.8746738714244522, 0.706186042997987, 0.3433546639254969, 0.5464978450331963, -0.7210560660271685, -0.36011718877280324, -0.2795266867843697], [-0.1449079582544856, 0.9898798503759197, 1.198607355318777, 0.028791258845097225, 0.6435484239701562, -0.9508653855887175, -0.2068420927355453, 0.029081147299749283, -0.027642009349759508, -0.9054563410454248  …  -0.9933276339872497, -0.08824627257337099, -0.14294146218036535, -1.8183770012254328, 0.8243300237119066, 0.3836169397168419, 0.472350660928713, -0.8116881663922773, -0.33260008138631575, -0.2526849425564585], [-0.5958515785779643, 0.8872298625026576, 0.8146998210310078, -0.3005912519735814, 0.6033585892483714, -0.9399305947602072, 0.3227818937663656, -0.5276469270006034, 0.3504111127230344, -0.5507943367185595  …  -0.723484351247283, -0.019349118826396484, 0.5685447558900238, -1.2981612585894238, 0.4009436411689893, 0.3916064851181499, 0.1547233541883041, 0.12713993023828823, -0.46779112618074037, 1.2375510049243454], [-0.8497675965024616, 0.5743503939918849, 1.3749085816000735, -0.28616336325474573, 0.8764992427714317, -0.8285455415075926, 0.7440988935279794, -0.6258584334749141, 0.7425018961529549, -0.529278296265905  …  -1.112873473591882, -0.38233737893485875, 0.5851367756351951, -0.9113447093860995, 0.5294557572741851, 0.6741724991762167, 0.21942124443640032, -0.18594601520394283, -0.577663050501345, 1.1723749724967343], [-0.20223973388685632, 1.1026446399813254, 0.6138936524776101, -1.3201675249820357, -0.37930285518446, -1.0946403562637026, 1.1392160117366072, -0.41041478336832876, 0.6372613327311712, -0.5237940535226279  …  -0.6044433213335302, -0.7123977913118706, 0.31994485334756384, -0.7225228310992909, 0.45729446337295493, 0.6081354813104022, 0.6125611309167934, 
-1.1768371569469955, -0.20611496667632623, 1.0039937396164327], [-0.4754681009452479, -0.10592572065797093, -0.025757777313702532, -0.42106890038157413, -0.8949556529056029, -0.6881116746141459, -0.40523943987396255, 0.3939060154769824, -0.48152480718865426, 2.159952062008057  …  1.3873616159064084, -0.5860662856355111, -0.2243321597671025, 0.974689698031958, -0.3322634739159816, 0.0556347101991409, 0.5725016266147862, 0.8634216170050774, -1.41005350252397, 0.11459781047412289]], NamedTuple[(n_steps = 43, is_accept = true, acceptance_rate = 0.9534883720930233, log_density = -240.7940341107005, hamiltonian_energy = 454.63970212428774, hamiltonian_energy_error = -32.154817183489115, max_hamiltonian_energy_error = 3712.1430399493656, tree_depth = 5, numerical_error = true, step_size = 0.025, nom_step_size = 0.025, is_adapt = true), (n_steps = 1, is_accept = true, acceptance_rate = 0.0, log_density = -240.7940341107005, hamiltonian_energy = 347.66807820795424, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 8264.864678511598, tree_depth = 0, numerical_error = true, step_size = 0.6244643995825304, nom_step_size = 0.6244643995825304, is_adapt = true), (n_steps = 1, is_accept = true, acceptance_rate = 0.0, log_density = -240.7940341107005, hamiltonian_energy = 380.1555039593866, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 19414.387135834306, tree_depth = 0, numerical_error = true, step_size = 0.28359132914091223, nom_step_size = 0.28359132914091223, is_adapt = true), (n_steps = 7, is_accept = true, acceptance_rate = 0.8571428571428571, log_density = -124.81406657712795, hamiltonian_energy = 331.44855112032553, hamiltonian_energy_error = -26.927460730182418, max_hamiltonian_energy_error = 5125.503197718298, tree_depth = 2, numerical_error = true, step_size = 0.08691050755476441, nom_step_size = 0.08691050755476441, is_adapt = true), (n_steps = 2, is_accept = true, acceptance_rate = 3.5457279535657673e-188, log_density = 
-124.81406657712795, hamiltonian_energy = 241.53053725168905, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 90297.588900227, tree_depth = 1, numerical_error = true, step_size = 0.2577102414277276, nom_step_size = 0.2577102414277276, is_adapt = true), (n_steps = 10, is_accept = true, acceptance_rate = 0.8390474460415358, log_density = -93.4553471240417, hamiltonian_energy = 258.1872095972553, hamiltonian_energy_error = -2.104625457195823, max_hamiltonian_energy_error = 1349.1513936621673, tree_depth = 3, numerical_error = true, step_size = 0.06745875223112528, nom_step_size = 0.06745875223112528, is_adapt = true), (n_steps = 4, is_accept = true, acceptance_rate = 4.233084046407511e-14, log_density = -93.4553471240417, hamiltonian_energy = 225.28966518293856, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 121694.65630661651, tree_depth = 2, numerical_error = true, step_size = 0.21430151177261222, nom_step_size = 0.21430151177261222, is_adapt = true), (n_steps = 50, is_accept = true, acceptance_rate = 0.18634179717446778, log_density = -66.34516168067154, hamiltonian_energy = 206.34073433186174, hamiltonian_energy_error = -2.4629850493701326, max_hamiltonian_energy_error = 1469.5592482854936, tree_depth = 5, numerical_error = true, step_size = 0.052674762257064946, nom_step_size = 0.052674762257064946, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.7710397989388681, log_density = -91.20656968490181, hamiltonian_energy = 185.4155197386297, hamiltonian_energy_error = -0.6116572939347407, max_hamiltonian_energy_error = 18.558755393610397, tree_depth = 7, numerical_error = false, step_size = 0.022657816627083424, nom_step_size = 0.022657816627083424, is_adapt = true), (n_steps = 9, is_accept = true, acceptance_rate = 0.23218915664889858, log_density = -90.76533229082536, hamiltonian_energy = 226.5642735932206, hamiltonian_energy_error = 0.6271888595722146, max_hamiltonian_energy_error = 1742.7946323547776, 
tree_depth = 3, numerical_error = true, step_size = 0.061725916309347637, nom_step_size = 0.061725916309347637, is_adapt = true)  …  (n_steps = 127, is_accept = true, acceptance_rate = 0.31845504235275146, log_density = -118.2012734260004, hamiltonian_energy = 243.6387458938492, hamiltonian_energy_error = -0.4320705016482691, max_hamiltonian_energy_error = 58.75819380706022, tree_depth = 7, numerical_error = false, step_size = 0.03983945160192428, nom_step_size = 0.03983945160192428, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.40990967311252324, log_density = -123.57012583825166, hamiltonian_energy = 241.82043253679348, hamiltonian_energy_error = -0.30784544870306263, max_hamiltonian_energy_error = 10.626729723682331, tree_depth = 7, numerical_error = false, step_size = 0.02908125842169259, nom_step_size = 0.02908125842169259, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.9949607235864056, log_density = -121.19762683238791, hamiltonian_energy = 238.85501432404607, hamiltonian_energy_error = -0.20344627544733385, max_hamiltonian_energy_error = -0.7043793917542018, tree_depth = 7, numerical_error = false, step_size = 0.02678507506925079, nom_step_size = 0.02678507506925079, is_adapt = true), (n_steps = 3, is_accept = true, acceptance_rate = 8.074964737611342e-7, log_density = -121.19762683238791, hamiltonian_energy = 247.6908346196038, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 2200.9901683394655, tree_depth = 1, numerical_error = true, step_size = 0.10504859767374852, nom_step_size = 0.10504859767374852, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.7933062039985944, log_density = -133.82059097299336, hamiltonian_energy = 249.09174623657293, hamiltonian_energy_error = 0.033996045738149405, max_hamiltonian_energy_error = 4.048916636572756, tree_depth = 7, numerical_error = false, step_size = 0.03505581832473263, nom_step_size = 0.03505581832473263, is_adapt = true), 
(n_steps = 27, is_accept = true, acceptance_rate = 0.07389801769235696, log_density = -133.4302732273992, hamiltonian_energy = 250.26751498715356, hamiltonian_energy_error = -0.01612719806146856, max_hamiltonian_energy_error = 4814.420657771321, tree_depth = 4, numerical_error = true, step_size = 0.08227987750961838, nom_step_size = 0.08227987750961838, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.27515586265148045, log_density = -145.28657310648342, hamiltonian_energy = 276.75675062974716, hamiltonian_energy_error = -0.0006878448273255344, max_hamiltonian_energy_error = 604.463431492881, tree_depth = 7, numerical_error = false, step_size = 0.033404513274136044, nom_step_size = 0.033404513274136044, is_adapt = true), (n_steps = 255, is_accept = true, acceptance_rate = 0.21040537975635656, log_density = -146.25147643097736, hamiltonian_energy = 273.8423860994583, hamiltonian_energy_error = 0.9319702873511346, max_hamiltonian_energy_error = 6.878018158717623, tree_depth = 8, numerical_error = false, step_size = 0.022282778643361825, nom_step_size = 0.022282778643361825, is_adapt = true), (n_steps = 255, is_accept = true, acceptance_rate = 0.9114891052910615, log_density = -127.18425525658962, hamiltonian_energy = 277.69690858130696, hamiltonian_energy_error = -1.226450551100072, max_hamiltonian_energy_error = -1.789227256834124, tree_depth = 8, numerical_error = false, step_size = 0.012803389100619787, nom_step_size = 0.012803389100619787, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.6711025415891773, log_density = -136.38911723966737, hamiltonian_energy = 255.7964537230249, hamiltonian_energy_error = -0.44266706532675926, max_hamiltonian_energy_error = 73.64881012764283, tree_depth = 7, numerical_error = false, step_size = 0.03911860869579309, nom_step_size = 0.03911860869579309, is_adapt = true)])

Step 5: Plot diagnostics

Now let's make sure the fit is good. This can be done by looking at the chain mixing plot and the autocorrelation plot. First, let's create the chain mixing plot using the plot recipes from ????

samples = hcat(samples...)
+samples, stats = sample(h, kernel, p, 500, adaptor, 500; progress = true)
([[-0.3037718374519981, 0.20697417338375573, 0.20009739337742005, 0.05573280004546516, -0.30040488070559923, 0.14712863500850687, 0.19776391368170354, 0.1659888453927852, -0.10462018028520975, -0.18665576931492692  …  -0.009903067818086989, -0.2803448743730167, 0.14223906130065403, 0.003168809886588367, -0.007878388218809186, -0.24083232941035276, -0.029113762397749703, 0.16194353966396968, -0.03204578204505744, -0.057359706563682236], [-0.3037718374519981, 0.20697417338375573, 0.20009739337742005, 0.05573280004546516, -0.30040488070559923, 0.14712863500850687, 0.19776391368170354, 0.1659888453927852, -0.10462018028520975, -0.18665576931492692  …  -0.009903067818086989, -0.2803448743730167, 0.14223906130065403, 0.003168809886588367, -0.007878388218809186, -0.24083232941035276, -0.029113762397749703, 0.16194353966396968, -0.03204578204505744, -0.057359706563682236], [-0.3037718374519981, 0.20697417338375573, 0.20009739337742005, 0.05573280004546516, -0.30040488070559923, 0.14712863500850687, 0.19776391368170354, 0.1659888453927852, -0.10462018028520975, -0.18665576931492692  …  -0.009903067818086989, -0.2803448743730167, 0.14223906130065403, 0.003168809886588367, -0.007878388218809186, -0.24083232941035276, -0.029113762397749703, 0.16194353966396968, -0.03204578204505744, -0.057359706563682236], [-0.29691070137188535, 0.21588799017313443, 0.24653823867184085, 0.053312206814049426, -0.2613026506949713, 0.12090971117858465, 0.319856390093161, 0.07625202557107344, -0.03475007961528276, -0.33725059286267545  …  -0.010193938711850939, -0.2717135475556589, 0.1884214873125263, 0.04271435831508523, -0.10842256599719677, -0.25084355399947567, -0.1760433367803132, 0.09146764309167016, -0.0668999756716524, -0.11780129484389049], [-0.397954587720602, 0.5950280011987865, 0.261167247179764, 0.4208194685681579, -0.2780011125976699, 0.5753302413511568, 0.6792855684414978, -0.3666697188699853, 0.3370861727376231, -0.6832245197383024  …  -0.046480642952961775, -0.6911771654827297, 
0.18626181993680402, 0.036353770863603675, -0.38762005798137333, -0.4974986640838234, -0.2504252519287518, -0.11616979338816062, -0.7521418046573951, -0.05052930494050064], [-0.6137859673901089, 0.09865016874143273, -1.019859949428721, 0.19782176761416498, 1.5430737067299982, 0.07651235496895109, -0.9418901596954975, 1.027540862568245, -0.1591632711495474, 0.8177823700036208  …  -0.851149947667358, 0.5151711281792108, -0.011558392226636896, 0.11452382302224663, -0.007891307846206546, -0.04694845287212287, 0.4703529095530492, -0.09263775633348377, -0.10925839152596686, 0.20091583939037863], [-0.6103138654887736, -0.0123191998165144, -1.0057393973143482, 0.14606313396728732, 1.5226454966932454, 0.005835633116130168, -1.0602337857871178, 1.0217597704598977, -0.21916712128655114, 0.8343545979538585  …  -0.8802602390752114, 0.5088146692892361, 0.0382448511311577, 0.1707668301248749, 0.11425616451536985, 0.07824145629715898, 0.48320682164253614, -0.19928822500559262, -0.1740577053971936, 0.373500921295268], [-0.6423186748830795, -0.0649227747524189, -1.0135320392352305, 0.19617150412209722, 1.4153607552194232, 0.0970852867552654, -0.9431996030957539, 0.9326681946301901, -0.2356342584556591, 0.8984149408631361  …  -0.9978860189555723, 0.43787711397801704, 0.03209595309040883, 0.23416097516216558, 0.09848292113929674, 0.026482484512331244, 0.5527075991385588, -0.26636237628762965, -0.14726441176201624, 0.3649315684686471], [1.0428346660686667, 0.2253179580910552, 0.8509697520755101, -0.02228898139733142, -0.9610323748734837, 0.27339671772238616, 1.235711168479749, -1.4884209951761214, 0.5693647000234058, -0.7429973058627362  …  1.082932105046157, -0.9371441167032311, 0.735611192652997, -0.4523524046197529, 0.3021160865596414, 0.44292422581251956, -0.19741176519577344, 0.6897557058767337, -0.420175126333405, 0.13226329552135868], [0.8860012402057185, 0.31735968240725176, 1.0235895499194314, 0.09772896970289315, -0.9514726764800459, 0.4734930035587907, 1.0381971248291788, 
-1.5369955097921377, 0.7061409203074208, -0.6123790504872595  …  1.15674654565022, -0.9346872810011589, 0.8556581755730104, -0.4466751331177564, 0.16346495556908097, 0.3969979988844653, -0.04282522191366425, 0.6822484342314333, -0.35013740927749815, 0.12825626417754182]  …  [0.32338643336441864, 1.118776169406332, -0.16636721072562816, -0.2679360292123409, -0.9235750414401375, 0.2476616882822373, 0.512306562679021, 1.066145974974403, 0.491458646062991, 0.3141739409111772  …  -1.546580578470816, 0.7485599944723269, 1.0146192819173752, 0.6887335368663878, 0.26864641596824007, -0.8480352654204433, -0.10450792927806617, 0.536209571632343, -1.2051995169738596, -0.37919546106260993], [0.43367736375887384, 0.25807924797535214, 1.5141054749623817, 0.8221764216261276, 0.8133153256537758, 0.5265866831152275, -1.1175431298579637, 0.6280122814931952, -0.7309749212898246, 0.27743633521009936  …  0.3009665054201581, -0.04255793054184787, 0.4225556460151949, -0.6142750915571626, -0.3637518574749311, 0.2145539417038417, 0.6631333612917966, -1.25584186075243, 0.15980853967457248, 0.6078494930562787], [0.43367736375887384, 0.25807924797535214, 1.5141054749623817, 0.8221764216261276, 0.8133153256537758, 0.5265866831152275, -1.1175431298579637, 0.6280122814931952, -0.7309749212898246, 0.27743633521009936  …  0.3009665054201581, -0.04255793054184787, 0.4225556460151949, -0.6142750915571626, -0.3637518574749311, 0.2145539417038417, 0.6631333612917966, -1.25584186075243, 0.15980853967457248, 0.6078494930562787], [0.23764694088757343, 0.11715287285000024, 1.0760790319067033, 0.8234825901491863, 0.1510134352132879, -0.11449090689873712, -0.31947399609506166, 0.15572075940483643, -0.9336607717313957, -0.046816812364573496  …  0.6295340603151671, 0.5422622620193168, 0.35447138454121196, -0.12872180116841025, 0.22371380168862723, -0.9102458623508148, 0.26524936719711495, -1.4416508591282153, -0.03234023035776547, 0.1097323954800877], [0.9860737879175963, 0.9489014925520216, 
-0.3954775147897305, -0.7603078179783537, 0.4556091186719217, 0.6682478135964137, -0.26245984767882086, 1.1737057195404976, 0.17153316461882548, -0.46800528447646056  …  0.02601328256258946, -0.5078239448207768, 1.1658843971988138, -0.12418446816954587, 0.3057895485528346, 1.3241163610731477, 0.1994817358004073, -1.4098359227496555, -0.15291244675420052, 0.29339810856808496], [0.3762296372716479, 1.2828688752748092, -0.6107372088478676, -0.5532077529283348, 1.9678621192459655, 1.136308181569329, -0.292130777715354, 0.3093996172277447, -0.044880313314094866, -0.4462581823866875  …  -0.3050566953726493, -0.16459278232767235, 0.7174375687667752, -0.0007339368240170761, 0.13179354575710808, 0.6144612779334313, -0.2565494244774203, -0.7311569899655469, -0.06374015636088345, 0.21364323086225318], [0.7923399494476693, -0.9169905058993797, -0.865742107835365, 0.5138518061545659, -1.2335081137719648, 0.4975695093659647, 0.06794343734760834, -0.09751499072123879, -0.053892676864658526, 0.2574713494289282  …  0.10586373812591328, -0.39642631868408096, 1.2158003093282297, 0.1577692995107531, 0.9795949839878125, 0.3041530099550062, 0.14309152555196783, 0.1447230940591142, 0.5442322850494049, 0.14656991066145367], [0.7454496056565271, -0.9718466578731646, -0.7622454754939125, 0.5922156093271523, -1.2314617639240903, 0.550012305170155, 0.04360207006844958, -0.08981843967386613, -0.03446747198753157, 0.25569803480154313  …  0.20222739620859873, -0.37527546356675834, 1.2239918317026104, 0.14225638491962986, 1.048088355977439, 0.20455831232449723, 0.14384311469116876, 0.14937011878259976, 0.5209556778248, 0.0585916706991431], [-0.2555935522070284, 0.8618045080475314, 0.572823439759885, 0.5223412775386996, 1.0651525221704548, 0.4094093971797362, 0.19406013734189473, -0.3023159725691612, 0.5357594448242896, -0.04301645089875652  …  0.1940774124867116, 1.0412279764194867, 0.6512296785994517, -0.8786133899127575, -0.33177573497432633, 0.1391041918340996, 0.1464179785428038, 
0.624870743433491, -0.8191093365693967, 0.20656634018036968], [0.7987458589730407, 0.5983895560953167, 0.04100159774168909, -0.05396265166129247, 0.07239261034419955, -0.35538842875094745, 0.9864086950794865, -0.39292975913521744, -0.5608805812266698, -0.2226751987546458  …  1.3980085814018537, -0.7795983433728828, 0.3358961656276581, 0.18284471262805704, 0.02860070245764756, 0.15728706715704893, -0.4418586780311624, -0.02554733778668905, -0.8216672567193084, 0.08773764398858928]], NamedTuple[(n_steps = 72, is_accept = true, acceptance_rate = 0.7361780904674246, log_density = -245.44125365804058, hamiltonian_energy = 475.7432475709959, hamiltonian_energy_error = -35.19492926787018, max_hamiltonian_energy_error = 1451.8533164278733, tree_depth = 6, numerical_error = true, step_size = 0.025, nom_step_size = 0.025, is_adapt = true), (n_steps = 1, is_accept = true, acceptance_rate = 0.0, log_density = -245.44125365804058, hamiltonian_energy = 365.04086099557844, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 2046.052417697826, tree_depth = 0, numerical_error = true, step_size = 0.4206430949671595, nom_step_size = 0.4206430949671595, is_adapt = true), (n_steps = 1, is_accept = true, acceptance_rate = 0.0, log_density = -245.44125365804058, hamiltonian_energy = 387.32776209814915, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 1584.625481104836, tree_depth = 0, numerical_error = true, step_size = 0.16992021002418856, nom_step_size = 0.16992021002418856, is_adapt = true), (n_steps = 54, is_accept = true, acceptance_rate = 0.35185185185185186, log_density = -206.2523011532844, hamiltonian_energy = 351.8225816437002, hamiltonian_energy_error = -12.12281110726758, max_hamiltonian_energy_error = 1051.4795732231116, tree_depth = 5, numerical_error = true, step_size = 0.04870655568174132, nom_step_size = 0.04870655568174132, is_adapt = true), (n_steps = 63, is_accept = true, acceptance_rate = 0.2266402844493179, log_density = -119.3958853006955, 
hamiltonian_energy = 316.3468659994153, hamiltonian_energy_error = -1.1402073122124534, max_hamiltonian_energy_error = 184.97033735862277, tree_depth = 6, numerical_error = false, step_size = 0.03269626179132209, nom_step_size = 0.03269626179132209, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.737382320016288, log_density = -115.18150701301832, hamiltonian_energy = 233.32932588788174, hamiltonian_energy_error = 0.46827615278238, max_hamiltonian_energy_error = 0.9769413280646972, tree_depth = 7, numerical_error = false, step_size = 0.015376591370299147, nom_step_size = 0.015376591370299147, is_adapt = true), (n_steps = 63, is_accept = true, acceptance_rate = 0.4134600743949512, log_density = -115.15121524852249, hamiltonian_energy = 244.311065930731, hamiltonian_energy_error = 0.02336890391947577, max_hamiltonian_energy_error = 21.23377619137395, tree_depth = 6, numerical_error = false, step_size = 0.034382066893746246, nom_step_size = 0.034382066893746246, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.137615259622465, log_density = -113.28404426663387, hamiltonian_energy = 254.4237086874802, hamiltonian_energy_error = 0.07695009345633252, max_hamiltonian_energy_error = 333.0464862827048, tree_depth = 6, numerical_error = false, step_size = 0.029692433100985457, nom_step_size = 0.029692433100985457, is_adapt = true), (n_steps = 255, is_accept = true, acceptance_rate = 0.8910703074289235, log_density = -113.65877723389558, hamiltonian_energy = 230.83072875550178, hamiltonian_energy_error = 0.12891112267340077, max_hamiltonian_energy_error = 0.3829285750150575, tree_depth = 8, numerical_error = false, step_size = 0.010898161372593643, nom_step_size = 0.010898161372593643, is_adapt = true), (n_steps = 107, is_accept = true, acceptance_rate = 0.01398605955042706, log_density = -109.21629572459008, hamiltonian_energy = 219.94548780794952, hamiltonian_energy_error = 0.3088870542823372, max_hamiltonian_energy_error = 
1406.704812740226, tree_depth = 6, numerical_error = true, step_size = 0.0432197385488163, nom_step_size = 0.0432197385488163, is_adapt = true)  …  (n_steps = 63, is_accept = true, acceptance_rate = 0.17111398087285096, log_density = -147.57675855415744, hamiltonian_energy = 262.36428624670475, hamiltonian_energy_error = 1.362038502143264, max_hamiltonian_energy_error = 35.87602164803059, tree_depth = 6, numerical_error = false, step_size = 0.05598206962293892, nom_step_size = 0.05598206962293892, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.9778342361156972, log_density = -131.64259499050635, hamiltonian_energy = 265.67942408240174, hamiltonian_energy_error = -0.6939711248363665, max_hamiltonian_energy_error = -0.8367363040614464, tree_depth = 7, numerical_error = false, step_size = 0.028215999200788388, nom_step_size = 0.028215999200788388, is_adapt = true), (n_steps = 2, is_accept = true, acceptance_rate = 1.62966745918997e-120, log_density = -131.64259499050635, hamiltonian_energy = 257.63827188842833, hamiltonian_energy_error = 0.0, max_hamiltonian_energy_error = 1129.9538693850486, tree_depth = 1, numerical_error = true, step_size = 0.10729347417427919, nom_step_size = 0.10729347417427919, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.050814848412843006, log_density = -120.08271847101138, hamiltonian_energy = 255.76227369831906, hamiltonian_energy_error = 2.5890952334428334, max_hamiltonian_energy_error = 320.1584658859362, tree_depth = 7, numerical_error = false, step_size = 0.03559188243258389, nom_step_size = 0.03559188243258389, is_adapt = true), (n_steps = 255, is_accept = true, acceptance_rate = 0.8972509070797839, log_density = -134.83111166356124, hamiltonian_energy = 247.213915275076, hamiltonian_energy_error = -0.014179004694938158, max_hamiltonian_energy_error = 1.3583740756929217, tree_depth = 8, numerical_error = false, step_size = 0.013589376498436459, nom_step_size = 0.013589376498436459, 
is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.23313035402606921, log_density = -130.66821813019973, hamiltonian_energy = 254.75378237361878, hamiltonian_energy_error = 0.33592731633024187, max_hamiltonian_energy_error = 126.351944374889, tree_depth = 7, numerical_error = false, step_size = 0.04146192378594498, nom_step_size = 0.04146192378594498, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.9090287444706494, log_density = -124.79484788333974, hamiltonian_energy = 245.97764865133394, hamiltonian_energy_error = -0.21138494908348093, max_hamiltonian_energy_error = -1.0168759807131096, tree_depth = 7, numerical_error = false, step_size = 0.024924404159045378, nom_step_size = 0.024924404159045378, is_adapt = true), (n_steps = 24, is_accept = true, acceptance_rate = 0.034420485374202135, log_density = -126.54951372132, hamiltonian_energy = 245.85156391641294, hamiltonian_energy_error = 0.3563096442701976, max_hamiltonian_energy_error = 3793.3685785153098, tree_depth = 4, numerical_error = true, step_size = 0.07668879863067375, nom_step_size = 0.07668879863067375, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.37928707898148645, log_density = -135.69375588698242, hamiltonian_energy = 242.595198242758, hamiltonian_energy_error = 1.8996888840778468, max_hamiltonian_energy_error = 7.004612312491048, tree_depth = 7, numerical_error = false, step_size = 0.028752354861037274, nom_step_size = 0.028752354861037274, is_adapt = true), (n_steps = 127, is_accept = true, acceptance_rate = 0.9012461259576452, log_density = -117.88984635680522, hamiltonian_energy = 255.22006158485834, hamiltonian_energy_error = -3.884625950185921, max_hamiltonian_energy_error = -4.366718110975, tree_depth = 7, numerical_error = false, step_size = 0.02475978379139366, nom_step_size = 0.02475978379139366, is_adapt = true)])

Step 5: Plot diagnostics

Now let's make sure the fit is good. This can be done by looking at the chain mixing plot and the autocorrelation plot. First, let's create the chain mixing plot using the plot recipes from ????

samples = hcat(samples...)
 samples_reduced = samples[1:5, :]
 samples_reshape = reshape(samples_reduced, (500, 5, 1))
 Chain_Spiral = Chains(samples_reshape)
-plot(Chain_Spiral)
Example block output

Now we check the autocorrelation plot:

autocorplot(Chain_Spiral)
Example block output

As another diagnostic, let's check the result on retrodicted data. To do this, we generate solutions of the Neural ODE on samples of the neural network parameters, and check the results of the predictions against the data. Let's start by looking at the time series:

pl = scatter(tsteps, ode_data[1, :], color = :red, label = "Data: Var1", xlabel = "t",
+plot(Chain_Spiral)
Example block output

Now we check the autocorrelation plot:

autocorplot(Chain_Spiral)
Example block output

As another diagnostic, let's check the result on retrodicted data. To do this, we generate solutions of the Neural ODE on samples of the neural network parameters, and check the results of the predictions against the data. Let's start by looking at the time series:

pl = scatter(tsteps, ode_data[1, :], color = :red, label = "Data: Var1", xlabel = "t",
              title = "Spiral Neural ODE")
 scatter!(tsteps, ode_data[2, :], color = :blue, label = "Data: Var2")
 for k in 1:300
@@ -75,11 +75,11 @@
 prediction = predict_neuralode(samples[:, idx])
 plot!(tsteps, prediction[1, :], color = :black, w = 2, label = "")
 plot!(tsteps, prediction[2, :], color = :black, w = 2, label = "Best fit prediction",
-      ylims = (-2.5, 3.5))
Example block output

That showed the time series form. We can similarly do a phase-space plot:

pl = scatter(ode_data[1, :], ode_data[2, :], color = :red, label = "Data", xlabel = "Var1",
+      ylims = (-2.5, 3.5))
Example block output

That showed the time series form. We can similarly do a phase-space plot:

pl = scatter(ode_data[1, :], ode_data[2, :], color = :red, label = "Data", xlabel = "Var1",
              ylabel = "Var2", title = "Spiral Neural ODE")
 for k in 1:300
     resol = predict_neuralode(samples[:, 100:end][:, rand(1:400)])
     plot!(resol[1, :], resol[2, :], alpha = 0.04, color = :red, label = "")
 end
 plot!(prediction[1, :], prediction[2, :], color = :black, w = 2,
-      label = "Best fit prediction", ylims = (-2.5, 3))
Example block output + label = "Best fit prediction", ylims = (-2.5, 3))Example block output diff --git a/dev/showcase/blackhole/index.html b/dev/showcase/blackhole/index.html index 1af420bdd5b..285f05bd622 100644 --- a/dev/showcase/blackhole/index.html +++ b/dev/showcase/blackhole/index.html @@ -466,4 +466,4 @@ Newt_waveform = compute_waveform(dt_data, Newtonian_solution, mass_ratio, model_params)[1] plt = plot(extended_tsteps,true_waveform, linewidth = 2, label = "truth", xlabel="Time", ylabel="Waveform") plot!(plt,extended_tsteps,pred_waveform, linestyle = :dash, linewidth = 2, label = "prediction") -plot!(plt,extended_tsteps,Newt_waveform, linewidth = 2, label = "Newtonian")Example block output +plot!(plt,extended_tsteps,Newt_waveform, linewidth = 2, label = "Newtonian")Example block output diff --git a/dev/showcase/brusselator/index.html b/dev/showcase/brusselator/index.html index 59baa3dc9e1..daa9ba53d37 100644 --- a/dev/showcase/brusselator/index.html +++ b/dev/showcase/brusselator/index.html @@ -378,4 +378,4 @@ ydomain:([0.0, 11.5], 0.0:0.03125:1.0, 0.0:0.03125:1.0) u: Dict{Symbolics.Num, Array{Float64, 3}} with 2 entries: u(x, y, t) => [0.0 0.115882 … 0.115882 0.0; 0.0 0.115882 … 0.115882 0.0; … ; … - v(x, y, t) => [0.0 0.0 … 0.0 0.0; 0.142219 0.142219 … 0.142219 0.142219; … ; …

And now we're zooming! For more information on these performance improvements, check out the deeper dive in the DifferentialEquations.jl tutorials.

If you're interested in figuring out what's the fastest current solver for this kind of PDE, check out the Brusselator benchmark in SciMLBenchmarks.jl

+ v(x, y, t) => [0.0 0.0 … 0.0 0.0; 0.142219 0.142219 … 0.142219 0.142219; … ; …

And now we're zooming! For more information on these performance improvements, check out the deeper dive in the DifferentialEquations.jl tutorials.

If you're interested in figuring out what's the fastest current solver for this kind of PDE, check out the Brusselator benchmark in SciMLBenchmarks.jl

diff --git a/dev/showcase/gpu_spde/18b4320d.svg b/dev/showcase/gpu_spde/18b4320d.svg new file mode 100644 index 00000000000..2fc74bcce11 --- /dev/null +++ b/dev/showcase/gpu_spde/18b4320d.svg @@ -0,0 +1,1515 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/gpu_spde/94eafa28.svg b/dev/showcase/gpu_spde/94eafa28.svg deleted file mode 100644 index 5680459ca2b..00000000000 --- a/dev/showcase/gpu_spde/94eafa28.svg +++ /dev/null @@ -1,1521 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev/showcase/gpu_spde/index.html b/dev/showcase/gpu_spde/index.html index 148e83ad2e6..a53d7f98adc 100644 --- a/dev/showcase/gpu_spde/index.html +++ b/dev/showcase/gpu_spde/index.html @@ -380,7 +380,7 @@ end
g (generic function with 1 method)

Now we just define and solve the system of SDEs:

prob = SDEProblem(f, g, u0, (0.0, 100.0))
 @time sol = solve(prob, SRIW1());
retcode: Success
 Interpolation: 1st order linear
-t: 81935-element Vector{Float64}:
+t: 81737-element Vector{Float64}:
    0.0
    9.999999999999999e-5
    0.0002125
@@ -392,40 +392,40 @@
    0.0012526276111602785
    0.0015092060625553133
    ⋮
-  99.97242984998377
-  99.97483468038679
-  99.9775401145902
-  99.98058372806904
-  99.98400779323272
-  99.98785986654187
-  99.99219344901466
-  99.99706872929656
+  99.99884712106036
+  99.99896004186259
+  99.99908344068763
+  99.99921874633198
+  99.99936774309911
+  99.99953301650224
+  99.99971791375455
+  99.9999259231634
  100.0
-u: 81935-element Vector{Array{Float64, 3}}:
+u: 81737-element Vector{Array{Float64, 3}}:
  [0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0; … ; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0;;; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0; … ; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0;;; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0; … ; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0]
- [4.999999999999999e-9 4.999999999999999e-9 … 9.999554819502825e-5 9.895663024476033e-5; 4.999999999999999e-9 4.999999999999999e-9 … 0.00010101035059008325 9.996842911200925e-5; … ; 4.999999999999999e-9 4.999999999999999e-9 … 0.00010104547499809213 0.00010005918116311528; 4.999999999999999e-9 4.999999999999999e-9 … 0.00010000610450764764 9.88815622145288e-5;;; 9.999999999999999e-5 9.999999999999999e-5 … 0.00010002675924576646 0.00010009278379940159; 9.999999999999999e-5 9.999999999999999e-5 … 9.993455111238553e-5 0.00010000581247104085; … ; 9.999999999999999e-5 9.999999999999999e-5 … 0.00010002869178885522 9.997025238158166e-5; 9.999999999999999e-5 9.999999999999999e-5 … 0.00010000596580286909 0.00010003553403404012;;; 9.999e-5 9.999e-5 … 9.999027284379187e-5 0.00010000983029375291; 9.999e-5 9.999e-5 … 9.996238229239112e-5 0.0001000016994221286; … ; 9.999e-5 9.999e-5 … 0.0001000095445470527 0.0001000500062089668; 9.999e-5 9.999e-5 … 9.998469470683099e-5 9.998507640993345e-5]
- [2.2327941313324602e-8 2.2556573893183975e-8 … 0.0002126471190886327 0.00020799211482932592; 2.2570575615125402e-8 2.281997434856065e-8 … 0.0002171853239136111 0.0002121389464415505; … ; 2.2571681771272805e-8 2.2786081360443666e-8 … 0.00021699345285285904 0.00021218592740615102; 2.233296266289066e-8 2.254761903861514e-8 … 0.0002123465226767719 0.00020809243007341305;;; 0.0002125000097996454 0.00021249997517052276 … 0.00021248785772687203 0.0002128209013589922; 0.00021250001712891928 0.00021250000741534206 … 0.0002127092329197624 0.00021238749254697703; … ; 0.00021249999454833664 0.00021249999661613285 … 0.00021265919256535727 0.00021243044746356606; 0.0002125000021580261 0.000212500003861927 … 0.00021247860066004522 0.00021257916126467329;;; 0.0002124548490745679 0.00021245485090437205 … 0.0002124603028008308 0.00021241178985704432; 0.00021245482953154949 0.00021245485655788562 … 0.00021266810486134663 0.00021245387782030956; … ; 0.00021245483620203955 0.00021245483710229273 … 0.00021236776068386215 0.0002122719248507061; 0.00021245486323679335 0.0002124548579947523 … 0.0002124592501573672 0.0002123495467563965]
- [5.634626907890387e-8 5.7440573813896956e-8 … 0.00033903426116994976 0.00032735927479724485; 5.746066234508273e-8 5.859176817416601e-8 … 0.0003509426051195404 0.0003389550595038105; … ; 5.7505115133819246e-8 5.8539050041839684e-8 … 0.00035025260702401266 0.000338856402155971; 5.629504401743447e-8 5.7389254510567214e-8 … 0.00033893778479214645 0.00032783868326085046;;; 0.00033906247806079814 0.0003390624382973563 … 0.0003392674718691006 0.0003395420031117181; 0.0003390624632480729 0.0003390624683694622 … 0.00033960634208836306 0.0003394045624450642; … ; 0.0003390624389213135 0.00033906245420706194 … 0.0003392707930702988 0.0003386107996306203; 0.0003390624653203015 0.00033906252142515197 … 0.00033844377365854555 0.0003394026695952993;;; 0.000338947592084294 0.00033894752759022767 … 0.0003387840152847246 0.00033889626989276996; 0.000338947622896451 0.00033894762719227547 … 0.000339549432672451 0.0003391006722479863; … ; 0.00033894761319695445 0.0003389475768175082 … 0.00033936827731457413 0.00033880759047517624; 0.0003389475216565692 0.0003389475492432646 … 0.0003387441028826303 0.0003389997329459985]
- [1.1239936328750536e-7 1.1585534235919849e-7 … 0.0004813478585467114 0.0004584844656632584; 1.1580372913956578e-7 1.1916910625830738e-7 … 0.0005037520987966521 0.0004812217681394159; … ; 1.1589917880784483e-7 1.1896686140376649e-7 … 0.0005032006366748411 0.00048150381975672354; 1.125180624457308e-7 1.1569775043858493e-7 … 0.00048160889746896857 0.00045989910701033337;;; 0.00048144518154738926 0.0004814451447411079 … 0.0004824560579575608 0.0004816483606537937; 0.0004814452782031786 0.0004814452985072646 … 0.00048209518010507304 0.0004812385149716528; … ; 0.00048144516418905807 0.00048144523747068607 … 0.0004819841608794415 0.0004813205541375525; 0.00048144545856046043 0.0004814454301742493 … 0.0004811658887116176 0.0004825375111553164;;; 0.0004812136348497755 0.0004812135858107144 … 0.0004802195655100756 0.0004811867027964109; 0.0004812136699772313 0.0004812137565244163 … 0.00048224815886427607 0.00048189242652844954; … ; 0.00048121372732450366 0.0004812134962155547 … 0.0004813374431347401 0.00048009862838714807; 0.00048121349083239586 0.00048121353237980444 … 0.0004814493752402824 0.0004806511925777898]
- [1.9757115044990709e-7 2.051810600979021e-7 … 0.0006408851996106616 0.0006037270593281997; 2.0558199554411907e-7 2.134502107260836e-7 … 0.0006801746659951226 0.0006413530101346581; … ; 2.0537315083904795e-7 2.1324879256532974e-7 … 0.0006799317130886457 0.000640976172300651; 1.978465048860651e-7 2.0540930144599645e-7 … 0.0006421123191740426 0.000604275195318291;;; 0.0006416259324983111 0.0006416256109801177 … 0.0006419754824417365 0.0006410506037436455; 0.00064162577638361 0.00064162602300059 … 0.0006426126959951361 0.0006416019629549842; … ; 0.0006416260918066663 0.0006416257345626896 … 0.0006417656071789267 0.0006403917955054739; 0.0006416264007139342 0.0006416259809557508 … 0.0006421704855012455 0.0006429740235132917;;; 0.0006412148401666664 0.0006412142147800626 … 0.0006400221566283908 0.0006417071134487462; 0.0006412145586768854 0.0006412144882393782 … 0.0006421129335268929 0.0006413873482003743; … ; 0.0006412148521673985 0.00064121441643697 … 0.000641214931890473 0.0006416319024286201; 0.0006412144141950876 0.000641214269537754 … 0.0006412322914558891 0.0006398925760897174]
- [3.205836321871972e-7 3.3646498282352276e-7 … 0.0008194461933544134 0.0007627801250002506; 3.368691022047382e-7 3.5413175146066343e-7 … 0.0008851114476459035 0.0008216706573345475; … ; 3.3658036082086804e-7 3.5363476720089033e-7 … 0.0008847864916313673 0.0008209125326670091; 3.2074559261750376e-7 3.365733849822043e-7 … 0.0008225502238552331 0.0007627648963443016;;; 0.0008218292400236738 0.0008218287964127014 … 0.0008220697934111593 0.0008229689936318501; 0.0008218300534593252 0.0008218299424405624 … 0.000822278399344254 0.0008237093776756881; … ; 0.0008218302807761034 0.0008218287983098197 … 0.0008225405823000658 0.0008200741901286701; 0.0008218295126510102 0.0008218283738615404 … 0.0008221397571636993 0.0008222387626661496;;; 0.000821154489464138 0.0008211540529720347 … 0.0008189354166010495 0.0008212231995986487; 0.0008211547589910776 0.0008211532594526956 … 0.000821233665528314 0.0008224981217680279; … ; 0.0008211546488741782 0.00082115460351001 … 0.0008230578060659179 0.000821452619417453; 0.0008211543609215175 0.0008211543099743894 … 0.0008200076494761997 0.0008200113068565699]
- [4.914915197061892e-7 5.230538802440853e-7 … 0.0010195468701120305 0.000934479719738254; 5.218589992842453e-7 5.56201142174211e-7 … 0.0011174679926004025 0.0010225159139104743; … ; 5.246420813252666e-7 5.567224520502735e-7 … 0.0011178335624928697 0.0010233393861596935; 4.921586093749579e-7 5.227032737080955e-7 … 0.0010241718367884733 0.0009348558949837105;;; 0.0010245581882222265 0.001024557752829198 … 0.0010276820485158598 0.0010274864534647848; 0.0010245582074101729 0.0010245582102519307 … 0.0010255667708383726 0.0010252248759123996; … ; 0.0010245597589015236 0.0010245570770127 … 0.0010239778562252761 0.0010233487338441893; 0.001024558311290393 0.0010245571249891882 … 0.0010258467740254056 0.0010229818387252965;;; 0.001023509230636531 0.0010235077934347616 … 0.0010213583411777618 0.0010230462853378353; 0.0010235091281159587 0.0010235084588119834 … 0.0010252959491343634 0.001026196924095132; … ; 0.0010235085767392011 0.0010235082154635494 … 0.0010248641843746322 0.0010247186718058772; 0.0010235095133010463 0.0010235072965435413 … 0.0010238563153654918 0.00102137987862189]
- [7.251916523938768e-7 7.815576253739718e-7 … 0.0012453167998323239 0.0011205704783644622; 7.794261440220919e-7 8.421724514395108e-7 … 0.0013912817618993017 0.0012476044695857056; … ; 7.822623915714671e-7 8.420434512926146e-7 … 0.0013919804676625128 0.0012477034380306514; 7.265913249822046e-7 7.791545245615495e-7 … 0.001252846464662149 0.001118954947435539;;; 0.0012526277145660171 0.0012526262067674206 … 0.0012533588349700907 0.001253887578396111; 0.0012526280717289854 0.0012526256559961158 … 0.0012515552798933664 0.0012543177902756047; … ; 0.0012526274294166937 0.0012526269947198145 … 0.0012512316550280678 0.0012512024550324847; 0.0012526277442552903 0.0012526280485850598 … 0.0012529183284629947 0.0012516322596566448;;; 0.0012510597852091695 0.0012510591802068222 … 0.0012477219634598052 0.0012506766471941312; 0.0012510587773347497 0.001251058394302397 … 0.0012538672729907599 0.0012542174355280702; … ; 0.001251060177476816 0.0012510576197412777 … 0.0012528512808411057 0.0012506743427095491; 0.0012510588634112173 0.0012510601943669056 … 0.0012479071622353195 0.0012509769760275205]
- [1.0393720325172304e-6 1.133854564959057e-6 … 0.0014978432120235548 0.0013206826891258177; 1.131190689937668e-6 1.2395775840997444e-6 … 0.001704169660186083 0.0015007069859968587; … ; 1.1318015814403746e-6 1.237608907997374e-6 … 0.0017015731620394772 0.001500468110821927; 1.0376066537828462e-6 1.1323514491426362e-6 … 0.0015044570634038727 0.0013187236547680956;;; 0.0015092043137175108 0.0015092036930417503 … 0.0015113999585313989 0.0015099445169209555; 0.0015092022260753902 0.0015092039629735618 … 0.0015121413759328327 0.001512519887547395; … ; 0.0015092065823462718 0.0015092052378544258 … 0.0015045562942994082 0.0015048074009613604; 0.0015092066402858685 0.001509206657071656 … 0.0015124727372586495 0.0015081918980348575;;; 0.0015069312659439576 0.0015069279511715353 … 0.0015063277940080566 0.001505871737413551; 0.0015069274706601117 0.0015069316828572818 … 0.0015098374808299273 0.001512227152614924; … ; 0.0015069274809441143 0.0015069255737849166 … 0.0015064971117096017 0.0015071092229256078; 0.001506932718402587 0.0015069293447892223 … 0.0015051887576216039 0.0015058776748460775]
+ [4.999999999999999e-9 4.999999999999999e-9 … 0.00010008324944642521 9.903292188949954e-5; 4.999999999999999e-9 4.999999999999999e-9 … 0.0001009260918817584 9.988970360782775e-5; … ; 4.999999999999999e-9 4.999999999999999e-9 … 0.00010105189499279101 0.0001000405946110004; 4.999999999999999e-9 4.999999999999999e-9 … 0.00010002215542791598 9.902022109246603e-5;;; 9.999999999999999e-5 9.999999999999999e-5 … 9.996276298826396e-5 9.995572156305e-5; 9.999999999999999e-5 9.999999999999999e-5 … 9.997577022539459e-5 9.996176793359349e-5; … ; 9.999999999999999e-5 9.999999999999999e-5 … 0.00010005920436798786 0.0001000187698232787; 9.999999999999999e-5 9.999999999999999e-5 … 0.00010003510498511973 0.0001000520355037907;;; 9.999e-5 9.999e-5 … 0.00010008853953585938 9.999973556549468e-5; 9.999e-5 9.999e-5 … 9.994302503040619e-5 0.0001001078839358918; … ; 9.999e-5 9.999e-5 … 9.997292087129894e-5 0.00010005669325825577; 9.999e-5 9.999e-5 … 0.0001000232990584694 0.000100026992577312]
+ [2.232838533472438e-8 2.2575442431077368e-8 … 0.00021240099044053003 0.00020817976499765115; 2.256071726475189e-8 2.2811994839120876e-8 … 0.00021712604726465694 0.00021244926426706325; … ; 2.2571141853624296e-8 2.2803779813855847e-8 … 0.0002166331411058322 0.0002125591551382798; 2.2351284963531563e-8 2.2582098004663027e-8 … 0.00021234186360563932 0.0002082444086451958;;; 0.00021249999499403695 0.0002124999950542502 … 0.00021251049760322169 0.00021263061394876626; 0.00021249999679840573 0.00021249999193397823 … 0.00021240825647319943 0.00021247704865158423; … ; 0.00021249999055806884 0.00021249999586736196 … 0.00021262698525333366 0.0002126303392021845; 0.00021249998215956364 0.00021250000138041771 … 0.0002124627307563837 0.000212704704642412;;; 0.00021245486271049605 0.00021245486474095408 … 0.00021236492853667432 0.0002121978807379108; 0.00021245484111526793 0.00021245485526557792 … 0.0002121133370943631 0.00021226975939811927; … ; 0.00021245484784509575 0.00021245483175100663 … 0.0002124735824762148 0.00021256125665473544; 0.00021245487704417356 0.00021245485899472468 … 0.00021236089783395367 0.00021244851595632604]
+ [5.6335251534148445e-8 5.738396602977146e-8 … 0.0003382767417188806 0.0003280773231179003; 5.748882461970809e-8 5.857764219246466e-8 … 0.0003504166486892322 0.00033865550269017826; … ; 5.742243185915258e-8 5.8618005056325515e-8 … 0.0003499700913782398 0.0003387473842349393; 5.63979696003938e-8 5.748823368981786e-8 … 0.0003384182274112347 0.0003278395486792216;;; 0.000339062434402322 0.0003390625130319162 … 0.0003392340173840966 0.00033923337744352976; 0.0003390624791713008 0.0003390624583291616 … 0.00033856248603762154 0.0003390503619657825; … ; 0.00033906251409605983 0.0003390624123859737 … 0.0003393714998488218 0.0003389432675105106; 0.00033906250050296785 0.00033906249021096893 … 0.0003392462551186686 0.00033921163944745973;;; 0.0003389475370743281 0.0003389476423657819 … 0.00033930444486117493 0.000339250105233586; 0.00033894751134012793 0.00033894750356964077 … 0.00033862453322417335 0.0003381833565536497; … ; 0.0003389475845130815 0.00033894756150618777 … 0.0003393863156727384 0.0003389187469390124; 0.00033894755233552034 0.00033894754020621345 … 0.000338611049680446 0.0003390440354563827]
+ [1.125639803551172e-7 1.1580629850194523e-7 … 0.00048087210737728916 0.0004588270745110726; 1.1579385665748805e-7 1.1913449585927802e-7 … 0.0005035697372435552 0.0004800093154328621; … ; 1.1580470306429124e-7 1.1934197630868808e-7 … 0.000503714835745738 0.00048111440945484534; 1.1254407894623941e-7 1.1573562180556638e-7 … 0.00048066547894821303 0.00045916517248500863;;; 0.00048144531779377675 0.0004814451425941623 … 0.0004821808926940481 0.0004819974155008236; 0.00048144532788592793 0.000481445226321923 … 0.00048072369398502 0.0004815267558266938; … ; 0.0004814450764916423 0.00048144514573222 … 0.00048230778406691514 0.000481815275029386; 0.000481445204534429 0.00048144513091040355 … 0.0004809156366558411 0.0004815224129532765;;; 0.00048121350810605964 0.00048121357750820703 … 0.00048086915774290997 0.0004821927299761549; 0.0004812136709989362 0.00048121358560312077 … 0.0004806177112961606 0.0004804380025124069; … ; 0.00048121363600821796 0.00048121368577097557 … 0.00048206652651632264 0.00048158853280839243; 0.00048121361202291485 0.0004812135586021989 … 0.00048142203241477666 0.0004813933378162875]
+ [1.9784579503019424e-7 2.055362316539568e-7 … 0.000640445634952645 0.0006025065556840189; 2.0550572108382316e-7 2.1369007481050558e-7 … 0.0006789943355069361 0.0006389158556590101; … ; 2.0552052853209671e-7 2.1380215177048585e-7 … 0.0006794456550296605 0.0006408045188306509; 1.9780578490420775e-7 2.0539281831652138e-7 … 0.0006393332289313196 0.0006035343809052133;;; 0.0006416258570851576 0.0006416258464424292 … 0.0006420140722290561 0.0006412328456089911; 0.0006416259518218361 0.0006416256114886923 … 0.0006402987934378783 0.0006412576965150806; … ; 0.000641625854661124 0.000641626462170878 … 0.0006424083408943579 0.0006417244880429185; 0.0006416257473957675 0.0006416259905098581 … 0.0006410943365880192 0.0006424708014592439;;; 0.0006412142915251128 0.0006412147962503137 … 0.0006410588775874934 0.000641594037440001; 0.0006412146893394295 0.0006412142895204726 … 0.000640193633449966 0.0006398518644600898; … ; 0.0006412142584578078 0.0006412145652458766 … 0.0006414058382002251 0.0006418331218311179; 0.0006412142923934073 0.0006412142006726971 … 0.0006415176538654711 0.0006416488717593755]
+ [3.208973796369958e-7 3.369370091104764e-7 … 0.0008191392244824279 0.0007598782493689497; 3.3634036346072054e-7 3.5432618046852106e-7 … 0.0008820100081019786 0.000817621539702265; … ; 3.3732169991878905e-7 3.5434555933164016e-7 … 0.0008846400325141588 0.0008203562294529041; 3.2117548669481494e-7 3.3663057862965205e-7 … 0.00081811885195028 0.0007600263418605733;;; 0.00082182904559827 0.0008218285641464039 … 0.0008223566595611331 0.0008214600704419477; 0.0008218285499817299 0.0008218289029499261 … 0.0008195505558494003 0.0008218616303817494; … ; 0.0008218290763880246 0.0008218295084942826 … 0.0008214716309135109 0.0008213628064273192; 0.0008218286026913774 0.0008218288158294608 … 0.0008205864918294311 0.0008229710976667529;;; 0.0008211540549720443 0.0008211546280287434 … 0.000821462112001805 0.0008209940302209487; 0.0008211546420518408 0.0008211534339588036 … 0.000821831285056885 0.0008199246319209089; … ; 0.0008211542717043488 0.0008211544854680968 … 0.0008214738469244484 0.0008228480396181665; 0.0008211539024213917 0.0008211538374970907 … 0.0008226533301318289 0.000821420720364222]
+ [4.923201696782144e-7 5.220411277808561e-7 … 0.0010190895705359045 0.0009326015993606427; 5.229316933996194e-7 5.565710403989888e-7 … 0.0011182938618995268 0.0010157697956749909; … ; 5.23993461807825e-7 5.569910078674584e-7 … 0.001118801569352867 0.0010204762286055162; 4.922041247575382e-7 5.229236444360744e-7 … 0.0010171187570139476 0.0009322228879448635;;; 0.0010245583342952467 0.0010245568224808022 … 0.001025950994161864 0.0010243111550959324; 0.0010245569613821159 0.0010245572182398474 … 0.00101920436751704 0.0010252344701723749; … ; 0.0010245571434266197 0.0010245588590970982 … 0.0010232736107164623 0.0010239324763896681; 0.0010245564622366906 0.0010245577027674854 … 0.0010221747926769545 0.0010276283758159108;;; 0.0010235082208696674 0.0010235083961460778 … 0.001026690324798259 0.001023345316021371; 0.0010235089377565276 0.0010235074599377557 … 0.0010242747569984606 0.0010228092670795475; … ; 0.001023509100948929 0.0010235095940561785 … 0.0010241191828101583 0.0010249629639794272; 0.0010235086192864132 0.0010235071892110627 … 0.0010254236045945681 0.0010220801771709588]
+ [7.264925703712864e-7 7.795652120552143e-7 … 0.0012442395639526479 0.0011188132727896435; 7.807906690803838e-7 8.425320607413814e-7 … 0.001390857020668891 0.0012407387426373357; … ; 7.796982536728029e-7 8.4222449640013e-7 … 0.0013916847671893403 0.0012458766119179247; 7.251280765369861e-7 7.813298039973522e-7 … 0.0012411649085613023 0.0011169496851569363;;; 0.0012526277786727444 0.0012526246882468648 … 0.0012533253002408107 0.0012528722084046806; 0.0012526258824704162 0.0012526256750385696 … 0.0012480371889476212 0.0012539087713889126; … ; 0.0012526280374909162 0.0012526267214457252 … 0.0012527453609842724 0.0012508967695292797; 0.0012526256924443764 0.0012526282936329025 … 0.0012504950189376669 0.0012564549472707759;;; 0.0012510587485915128 0.0012510596102741777 … 0.001254320628458978 0.0012506935943881238; 0.0012510603301776895 0.0012510589948335957 … 0.0012536260317288514 0.0012512107971634696; … ; 0.0012510606493582482 0.0012510608228938544 … 0.0012534634903929873 0.0012535008183720076; 0.0012510608067746705 0.0012510577379904035 … 0.0012551563420112296 0.0012471020653966618]
+ [1.039689246703923e-6 1.1308580494240412e-6 … 0.0014936485954840208 0.0013203838528835106; 1.1294159385720633e-6 1.238564892405395e-6 … 0.001706870391926914 0.0014934402583444554; … ; 1.126900733490704e-6 1.2399076133330468e-6 … 0.0017063270651207526 0.0014992785308074069; 1.0403807772870612e-6 1.1330980824470121e-6 … 0.001490480897837271 0.0013191437247557895;;; 0.0015092069641999093 0.0015092035566109671 … 0.001509684248985377 0.0015101310063059992; 0.001509204488084634 0.0015092023685288915 … 0.0015059440019343207 0.001507038804706964; … ; 0.001509204681283379 0.0015092024489534825 … 0.0015062136964954715 0.0015070610861063246; 0.0015092035024262948 0.0015092052255327448 … 0.0015047674120343118 0.0015151165434472923;;; 0.0015069295570337377 0.0015069303447979654 … 0.0015104941663862561 0.0015081179214125528; 0.0015069310238957194 0.0015069282573786239 … 0.0015066454859066643 0.0015047823753420602; … ; 0.0015069301208957403 0.0015069300034784628 … 0.0015080718894942872 0.0015148048035930409; 0.0015069310926901062 0.001506928186524821 … 0.0015139859013859915 0.0015025364653300148]
  ⋮
- [0.08736670118369928 0.17341319704749975 … 0.5250381184958276 0.2680632388378998; 0.17338519998679025 0.3456443978222325 … 1.0470585275594735 0.5322120279956907; … ; 0.19047679287380037 0.3756017322920311 … 1.0811066752852763 0.5377574105358297; 0.09655919535714441 0.18969653598999833 … 0.5428484969235858 0.27233548142441094;;; 1.4192216868244083 1.5072249168819316 … 1.2899629342478884 1.4444164785966316; 1.3798068150742082 1.4736431259534142 … 1.1928100242510002 1.3120228693386289; … ; 1.691213240726334 1.4593671404181812 … 1.353919846507772 1.436119816583515; 1.4016978321650924 1.3850564333080098 … 0.9375941857998698 1.1831696383065915;;; 0.5749602490824451 0.6441447091377558 … 1.0831866077701493 0.6693898457175067; 0.5396231614949256 0.5570019431884387 … 0.9562132226337521 0.8907794016791525; … ; 0.6021837609900899 0.6868432946287428 … 1.0033240107458385 1.0628081388044295; 0.6227203700200803 0.5923718249180182 … 1.0926763140133626 0.8302818660192702]
- [0.08710349841640734 0.17307237925989188 … 0.5272911768316715 0.26612899500936005; 0.17376332990076476 0.3464829201055262 … 1.0497581807891185 0.5288647943151763; … ; 0.18943842187813834 0.37441903317791525 … 1.0826423015316753 0.5437634956270898; 0.09531397204721746 0.18914171086318043 … 0.5424868829802876 0.27404723544121684;;; 1.4182938205971334 1.506664859241326 … 1.2913466203724018 1.443726184098009; 1.3782026313369433 1.47412606155334 … 1.1880472275190144 1.3155285445150613; … ; 1.6899184711331126 1.4615726501398918 … 1.3437554182855502 1.4342682894304204; 1.4016600435144102 1.3853154053261891 … 0.9429573497717645 1.184475332685606;;; 0.5753777179223104 0.6441237294823713 … 1.0793993642056474 0.6704542955522886; 0.5397470302408012 0.5607271069977668 … 0.9546751851882039 0.8896999597481721; … ; 0.6032424111467861 0.6898459530804495 … 1.016178133068384 1.0574497754073453; 0.6228115594512981 0.5934386642509522 … 1.094372935930806 0.8288104302321068]
- [0.08747844324785387 0.17367793809503726 … 0.5270092614877988 0.26691603516025386; 0.1746199224498666 0.34688474209832676 … 1.0465863542647853 0.5271523189640885; … ; 0.18860960115611106 0.3735562172001191 … 1.087656165257833 0.5450140732664427; 0.0946191599951221 0.1879078877873142 … 0.5440266539955871 0.2741756621896484;;; 1.4172364647869338 1.5066722383868456 … 1.2907683906460268 1.4434421197688172; 1.378529538642738 1.4757542607493341 … 1.1870028687685992 1.3154180058753127; … ; 1.688713248159326 1.460672087274065 … 1.3366724470585025 1.4335847355174454; 1.4028716199510702 1.3838999971927395 … 0.9419633125060313 1.184680044163902;;; 0.5760635929404727 0.6452577952580356 … 1.076025505683624 0.6706046533833677; 0.5406931874764617 0.5622788159320777 … 0.9558895065187976 0.8926295858726587; … ; 0.6050029675218731 0.6920942344299604 … 1.0153017460930691 1.0569002214001024; 0.623062654413965 0.5942642705752651 … 1.091468519519859 0.8278544186897671]
- [0.08736227831666078 0.1752005144713815 … 0.5293331302248993 0.2670277821897211; 0.17368207346278855 0.347512869840375 … 1.0516571413340754 0.5283243031543703; … ; 0.18780852359071606 0.3710718201759263 … 1.0872675547570587 0.5457537896263632; 0.09379489303823614 0.18791838567911767 … 0.5484358778063049 0.2738747995976798;;; 1.4173179894466137 1.5059762788221411 … 1.2900940830419836 1.4436370181081155; 1.3801989038576044 1.4733852358488682 … 1.1813970360931183 1.3124684125548711; … ; 1.687805146405283 1.457230375257516 … 1.3372347703858585 1.4389700839263306; 1.4036497030362904 1.3854385144923447 … 0.9425555887133834 1.1848430085776465;;; 0.5752760141939607 0.6447604156313764 … 1.0702640316895917 0.6726032542465549; 0.5408844525131763 0.5639470974650651 … 0.9642806846088012 0.8927540621069389; … ; 0.6047780849689265 0.695051631508292 … 1.013294779807932 1.0521690147234015; 0.6230393221824125 0.5922640249990998 … 1.088719982547472 0.8254493384459535]
- [0.08788186283698309 0.17634024600547488 … 0.5284842490030986 0.26798333075735487; 0.17419420935588872 0.35031992113725685 … 1.0523680433006963 0.5287315935823361; … ; 0.18775564342999868 0.368596914125291 … 1.095176218520371 0.5449175278699054; 0.09362260025875628 0.1874329993328844 … 0.5459842220726996 0.27635704407102596;;; 1.4169364517996739 1.5066685118117764 … 1.2927487271553597 1.442799445726561; 1.3798122647311188 1.4714398057880493 … 1.1931622313474122 1.3064739389330167; … ; 1.6858300571679667 1.4569044331330787 … 1.3361895353746638 1.4396554043484857; 1.4029215978300738 1.3838113783210537 … 0.9438670198818123 1.1870769186339252;;; 0.575595403500399 0.6448797772861258 … 1.0698400122543106 0.6719217037716082; 0.5426312950088316 0.5641285034742797 … 0.9690121776125313 0.8923438197648151; … ; 0.604816296082414 0.69281269827599 … 1.0155170961970554 1.0480748058744238; 0.6221545031384521 0.5938901005696045 … 1.0834385318472122 0.823802539541864]
- [0.08830323130740217 0.17564737287404894 … 0.5281791262149282 0.26810821109175936; 0.17548411381003093 0.35019349392102805 … 1.0600614384499933 0.5279053129608621; … ; 0.18807900153098553 0.36348717621901394 … 1.0978047896539287 0.5421222141040458; 0.09222398832492394 0.18669074598084998 … 0.5410628655503807 0.27932379276803754;;; 1.4169968040034286 1.5065579246777447 … 1.2932276527661306 1.4415216310105237; 1.3793120370189293 1.4682336492784016 … 1.1958376997709323 1.3104160467670969; … ; 1.682904001861255 1.4531482646880889 … 1.3322463776462312 1.441571950365919; 1.4037235222517221 1.383874888597862 … 0.951581972585349 1.1856502081608855;;; 0.5756310685715493 0.6444532061917384 … 1.07369225605024 0.6703959518394998; 0.5423824712656644 0.5681212979946015 … 0.975067008930844 0.8968478082822603; … ; 0.6027822216512674 0.6939259620895172 … 1.0173832586389937 1.0463123894013477; 0.6212930963503787 0.5935393132216582 … 1.079591844363021 0.8215717204694901]
- [0.08795907656361783 0.17338708786378154 … 0.5179240455449821 0.273116023255439; 0.17648227887442014 0.34960901693914825 … 1.0805055570620026 0.5153507518753995; … ; 0.19058896225503258 0.3575675309771922 … 1.1186133829267844 0.5317684808839713; 0.09035795595312193 0.19046216748810804 … 0.5260243165262802 0.2875305387742742;;; 1.4167426795997835 1.507305882413072 … 1.2962235542196139 1.4416256518678123; 1.37889766807062 1.4659716160416318 … 1.1902988524243865 1.3061996121025552; … ; 1.6797980136866566 1.4526206957149637 … 1.3195903661119865 1.4389227481221643; 1.4043304209105383 1.384523862904804 … 0.9562052757033404 1.1827625074641832;;; 0.5757652081179869 0.6434744748466694 … 1.0680826922628408 0.6695267417847078; 0.5436804332477768 0.5696822375233108 … 0.9843071482202594 0.8961067761386372; … ; 0.6019885941530758 0.6889242970501208 … 1.0066827706585781 1.0460330630943289; 0.6215727092678642 0.5924425438706332 … 1.0715963289647223 0.821185479176438]
- [0.08776147777539957 0.1664918217554881 … 0.4674665473276306 0.29791588260780505; 0.18077107487673286 0.3613446641896528 … 1.2057099306565122 0.45410613372030173; … ; 0.2065597687802446 0.3266098218645587 … 1.2255067269574909 0.48712460553046005; 0.08012492112873318 0.2109278369538051 … 0.4525468765980488 0.3212809099290408;;; 1.4176031322788558 1.5081478594102349 … 1.2993119054979767 1.4417300108446218; 1.3786748884914 1.4690978208763803 … 1.1901831143763346 1.2991597346870944; … ; 1.6783112008309018 1.4467575892774538 … 1.305828294380233 1.4436775592326199; 1.4046081945491615 1.3831380200792973 … 0.9618042821072821 1.1877023689211788;;; 0.5750215568289166 0.6445635201496844 … 1.0738963028386457 0.6687661588497502; 0.5454411719714714 0.5695847225827464 … 0.9917932591577272 0.8974333755578813; … ; 0.6013439566005536 0.6915444311499026 … 1.0168652506358151 1.0468199046565176; 0.6210337698185666 0.5921123136741224 … 1.0684602416557836 0.8158502186379933]
- [0.09039584570973148 0.1616873901291505 … 0.43243454061278364 0.3160059746913002; 0.17852872505967768 0.372053913543321 … 1.2755821466960626 0.4257358995102165; … ; 0.20926004382675611 0.31356463559856124 … 1.2635451693300563 0.4664466981689759; 0.07666853059990618 0.21751888798789262 … 0.4296793258035212 0.3331743536658979;;; 1.4172534765993865 1.5086948599835872 … 1.2954222184951503 1.4404846566669698; 1.378938525200995 1.4691772183582978 … 1.1933113950087142 1.2964686194312736; … ; 1.676032814663635 1.4461380238996722 … 1.3127156955607837 1.4444681563598172; 1.4049663385527138 1.3816126520809993 … 0.965565096872491 1.1902912232043912;;; 0.5750978850753682 0.6467515935313611 … 1.0733302746627986 0.6697569874211388; 0.5447642983700004 0.5710503315373737 … 0.9900985300341336 0.898700922725178; … ; 0.6004252633767995 0.6901785274420881 … 1.0078781424852217 1.0511822969974762; 0.6209970635420387 0.5933978758388893 … 1.0695139709549744 0.8142070715180463]
using Plots;
+ [0.32344585341118015 -0.23951762064002521 … 0.06686324094201931 0.5913226728955537; -0.25728606341446747 1.0853988485758699 … 2.138658974112079 -0.023997774723494573; … ; 0.2923939811822821 0.13687792536637564 … 3.0380739985748972 -0.21827588984159554; 0.04618914318086228 0.31696615055621197 … -0.3546797586202608 0.6323912135943809;;; 1.4604724009333596 1.3617789248555534 … 1.220599724611792 1.3126079404877087; 1.3437701835238176 1.2787245139108254 … 1.0826729559008723 1.1681394796513105; … ; 1.4806753256425842 1.2656120154580834 … 1.3265927231818275 1.2716749517707466; 1.4458618092975943 1.5056639422524365 … 1.2236307112065592 1.3617966959326784;;; 0.5762060684395639 0.6647588645261125 … 0.7276580692283875 0.8715381443109319; 0.6732636807710957 0.6552642343268618 … 1.0846586352969358 0.8753324647795347; … ; 0.6641352962105405 0.735937033948615 … 1.576686712979096 0.9848577955402422; 0.5586142684503784 0.6228646959380771 … 0.7468890638530763 0.6652373281915317]
+ [0.30418933786046254 -0.2051655602146413 … 0.10875865312004675 0.5663947276855692; -0.22104234006076756 1.016488512067858 … 2.0499156676324333 0.02676712461505571; … ; 0.28330833443477305 0.1572749808979783 … 2.885104990801815 -0.1503397388556197; 0.05087314962518478 0.3069818768184879 … -0.2765000114718399 0.5988608137712521;;; 1.4603760801452954 1.3618375753159375 … 1.220546114942322 1.3114710941150431; 1.344015058042396 1.2790817898577986 … 1.0796767638832607 1.1682339511076223; … ; 1.4803848282663605 1.2654243541830692 … 1.3268327793931487 1.272023747161397; 1.446029549529741 1.5060639399514646 … 1.2231183146263143 1.3622733795003796;;; 0.5768906086663067 0.6645891457657617 … 0.7275431280848484 0.8706836520538137; 0.6732149251818672 0.654911898866629 … 1.0818352814753673 0.8752362661887012; … ; 0.6641290733830695 0.7359596335577617 … 1.5762552571610777 0.9843862202843303; 0.5585523684599322 0.623227441069279 … 0.7467403254173902 0.6646016932285796]
+ [0.2847590541001498 -0.17079571570741087 … 0.150398116577435 0.5415685700540663; -0.1849843075287906 0.9501610313371808 … 1.9565845053546547 0.07715724133997642; … ; 0.2740660200916429 0.17761296172672444 … 2.72763370083821 -0.08234030893722527; 0.05545847435748615 0.2963068656732933 … -0.19870899824300947 0.5658442323593581;;; 1.4599626501886844 1.3616765891630365 … 1.2208653140811851 1.3114706903865443; 1.3443110711650763 1.2788877795554432 … 1.077806243833762 1.1682689877487562; … ; 1.4801097371754692 1.2652342974533768 … 1.3263921756581358 1.272285998475893; 1.446050714252855 1.5063319467088137 … 1.2234649963219608 1.3616143350253913;;; 0.5770265598360506 0.6646539575239541 … 0.7276046990372698 0.8707114066664939; 0.6731821730913959 0.655393511665646 … 1.0825916136708222 0.875137688974911; … ; 0.6642775965300408 0.73579218808143 … 1.5784524209901016 0.984205176009927; 0.5586166249739836 0.6231198203151365 … 0.7467472982880977 0.6652081490490135]
+ [0.2653997619107635 -0.13671892623955148 … 0.19147332653659618 0.5164925505155648; -0.14920059109604128 0.884281967260641 … 1.8673692105696842 0.12703003213497288; … ; 0.2649037190531652 0.19804872003283905 … 2.567237024291365 -0.015466672424382532; 0.05998691862562818 0.28572318638554683 … -0.12139947260183183 0.53369616896821;;; 1.4601328088162338 1.361797469816048 … 1.2207544892482836 1.3114499732548686; 1.3442879683540443 1.27885083527495 … 1.0802511366365144 1.168303736365679; … ; 1.4804469048102822 1.265527160351332 … 1.3284741319878235 1.2723404337199913; 1.4459933507781122 1.506034651272114 … 1.2236062450624785 1.3621217870811715;;; 0.5766486489387501 0.6644878231073273 … 0.727773637306194 0.8712005929333659; 0.6731095066302959 0.6556509706477014 … 1.0837009582379669 0.87517731590086; … ; 0.6641158457353242 0.7359365993604319 … 1.5759576968660411 0.9841142979492197; 0.558538005249994 0.6234430602956041 … 0.7466524269919464 0.6649738632687366]
+ [0.24665921184685424 -0.10340729144621946 … 0.23227736501558907 0.49142696179520384; -0.11412675184915237 0.8197504835184262 … 1.781386806856554 0.175861215257219; … ; 0.2554601858147967 0.21762680070660154 … 2.4158839458006045 0.0502126899019702; 0.06431111207863661 0.2753024945030557 … -0.04573953719337157 0.5027259002098928;;; 1.4595595693429777 1.3619998656148726 … 1.2210571124642284 1.311389155054479; 1.3446437617605718 1.2784197466699563 … 1.0799843493525114 1.1680496890113592; … ; 1.4801447204793763 1.2650549706916654 … 1.3299479063228825 1.2725195454083529; 1.445958144346029 1.5056281509807343 … 1.2237565880392507 1.361667884850983;;; 0.5765299381165166 0.6643085534266301 … 0.7286704972133405 0.8708428053018946; 0.6729652024381473 0.6557357449297758 … 1.0862932061191581 0.8747394582124651; … ; 0.6644951337840355 0.7362509676481178 … 1.5768668008066478 0.9839802561253483; 0.5585825524086357 0.6233025902528668 … 0.7465170126082493 0.6653529018752372]
+ [0.22814073237364757 -0.07069254180004368 … 0.27187901106264045 0.4679317108996364; -0.07968137578805536 0.754451756350473 … 1.6934123747441145 0.22376247328427287; … ; 0.24689967687121195 0.23751583856221703 … 2.263635192373891 0.1143530646716097; 0.06867548304261833 0.26520400023114676 … 0.0282357722985481 0.47281866323701366;;; 1.4596488782771522 1.362114081786221 … 1.2213862591549247 1.3123262430261045; 1.3450454056783632 1.2767483370984678 … 1.0820500547576386 1.167551237946885; … ; 1.4801218720379232 1.2647442525896617 … 1.3272400599122516 1.2726752386115796; 1.4459892073477338 1.5060906448588243 … 1.2238085332465156 1.3624447765888459;;; 0.5766338464791239 0.6642941618358963 … 0.7289235845916426 0.8715224876124494; 0.6727325364170772 0.6559158855816698 … 1.087628485210529 0.8743553258758565; … ; 0.6644930459820826 0.7371246189798936 … 1.5801016326239674 0.9836809849666489; 0.5585413568229775 0.6231155307581339 … 0.7464659862274282 0.664832565949782]
+ [0.20953820559566846 -0.03886664102345573 … 0.3104074080559404 0.44402796963331814; -0.046248387968491414 0.6950002952416936 … 1.604317341431067 0.2702799807385083; … ; 0.23847101937113047 0.2561876077902607 … 2.1151811080642076 0.17700933587904377; 0.07280264408065075 0.2545563336239306 … 0.10035297376260609 0.4423012279993629;;; 1.459630125475009 1.3622586717645444 … 1.2210290990641122 1.3126840141263394; 1.3453431403558844 1.2765499120980863 … 1.0841116606018106 1.167946318811792; … ; 1.4801032730361023 1.2646267439460808 … 1.323190061760972 1.2732872340870016; 1.445924369517254 1.5055180543269513 … 1.2238932792480657 1.36130385277041;;; 0.5769573478613021 0.6641750603902891 … 0.7291718145322383 0.8725641447974142; 0.67271992788143 0.6552387881034193 … 1.089325851877671 0.874390564693153; … ; 0.6643061749150101 0.737351314967353 … 1.579500967677233 0.9836787118167063; 0.5586174556391423 0.6234857694256554 … 0.7462475565208786 0.6643261175645996]
+ [0.19204427730754067 -0.008245301696898822 … 0.3479088882866831 0.4205692322121622; -0.013890310415565398 0.6370551603321933 … 1.5194317090196277 0.31507434305245197; … ; 0.2308336421009619 0.27383707565283155 … 1.9721569754109567 0.2364014750065411; 0.0765518977287731 0.24487318454282847 … 0.16951973448972016 0.41441146082614594;;; 1.459644270085159 1.3624075617675975 … 1.221030974728409 1.3122419991441272; 1.3454419358498253 1.2754136076932163 … 1.0835411455421033 1.168552748695868; … ; 1.479856465065785 1.2636321404349375 … 1.3249511508930605 1.273224765132956; 1.4459132914438704 1.50579036057827 … 1.223810907048622 1.3619729095312507;;; 0.5776169481681934 0.6640700857172435 … 0.7300069527355414 0.8725745581988216; 0.6726558606267862 0.6551715375322822 … 1.087508773335211 0.874327890448544; … ; 0.6643551885900297 0.7374772715205247 … 1.5813516094878808 0.9839479399964407; 0.5585689278567005 0.6224113042096956 … 0.7457816988029178 0.6643876941518719]
+ [0.1865064237793233 0.0015303133019822163 … 0.3597055966011904 0.41385816070224796; -0.0035393377573019707 0.6184368498851976 … 1.4956625855148564 0.32961104631086524; … ; 0.22841804534750018 0.27970230839493343 … 1.923774735516094 0.25560274882602996; 0.07777958686121672 0.24152492942888684 … 0.19178481279997067 0.40532064852122385;;; 1.4596211486726465 1.362431973025947 … 1.221358184981517 1.312532607592047; 1.3454590435992522 1.27590257779994 … 1.0849847303243434 1.1687767426888362; … ; 1.4798187465035841 1.2637711786965353 … 1.3238364848399025 1.2729911163035832; 1.4459664354581956 1.5059194972193117 … 1.2237571844170565 1.3620482434212504;;; 0.5774699044521963 0.6640469416725793 … 0.7297938618710633 0.8727785262201058; 0.6726215209423774 0.6552703276624501 … 1.0891311586550914 0.8742524779630028; … ; 0.6644115969762235 0.737637092554923 … 1.5833118991705337 0.9840692279717137; 0.5584998290730477 0.6223658753619129 … 0.7457366941705864 0.6641358587220605]
using Plots;
 gr();
 
 # Use `Array` to transform the result back into a CPU-based `Array` for plotting
 p1 = surface(X, Y, Array(sol[end][:, :, 1]), title = "[A]")
 p2 = surface(X, Y, Array(sol[end][:, :, 2]), title = "[B]")
 p3 = surface(X, Y, Array(sol[end][:, :, 3]), title = "[C]")
-plot(p1, p2, p3, layout = grid(3, 1))
Example block output

We can see the cool effect that diffusion dampens the noise in [A] but is unable to dampen the noise in [B] which results in a very noisy [C]. The stiff SPDE takes much longer to solve even using high order plus adaptivity because stochastic problems are just that much more difficult (current research topic is to make new algorithms for this!). It gets GPU'd just by using CuArray like before. But there we go: solving systems of stochastic PDEs using high order adaptive algorithms with within-method GPU parallelism. That's gotta be a first? The cool thing is that nobody ever had to implement the GPU-parallelism either, it just exists by virtue of the Julia type system.

(Note: We can also use one of the SROCK methods for better performance here, but they will require a choice of dt. This is left to the reader to try.)

Note

This can take a while to solve! An explicit Runge-Kutta algorithm isn't necessarily great here, though to use a stiff solver on a problem of this size requires once again smartly choosing sparse linear solvers. The high order adaptive method is pretty much necessary though, since something like Euler-Maruyama is simply not stable enough to solve this at a reasonable dt. Also, the current algorithms are not so great at handling this problem. Good thing there's a publication coming along with some new stuff...

+plot(p1, p2, p3, layout = grid(3, 1))Example block output

We can see the cool effect that diffusion dampens the noise in [A] but is unable to dampen the noise in [B] which results in a very noisy [C]. The stiff SPDE takes much longer to solve even using high order plus adaptivity because stochastic problems are just that much more difficult (current research topic is to make new algorithms for this!). It gets GPU'd just by using CuArray like before. But there we go: solving systems of stochastic PDEs using high order adaptive algorithms with within-method GPU parallelism. That's gotta be a first? The cool thing is that nobody ever had to implement the GPU-parallelism either, it just exists by virtue of the Julia type system.

(Note: We can also use one of the SROCK methods for better performance here, but they will require a choice of dt. This is left to the reader to try.)

Note

This can take a while to solve! An explicit Runge-Kutta algorithm isn't necessarily great here, though to use a stiff solver on a problem of this size requires once again smartly choosing sparse linear solvers. The high order adaptive method is pretty much necessary though, since something like Euler-Maruyama is simply not stable enough to solve this at a reasonable dt. Also, the current algorithms are not so great at handling this problem. Good thing there's a publication coming along with some new stuff...

diff --git a/dev/showcase/massively_parallel_gpu/index.html b/dev/showcase/massively_parallel_gpu/index.html index 9798e613357..7dc730e1089 100644 --- a/dev/showcase/massively_parallel_gpu/index.html +++ b/dev/showcase/massively_parallel_gpu/index.html @@ -23,4 +23,4 @@ sol = solve(monteprob, Tsit5(), EnsembleThreads(), trajectories = 10_000, saveat = 1.0f0)
EnsembleSolution Solution of length 10000 with uType:
 SciMLBase.ODESolution{Float32, 2, Vector{StaticArraysCore.SVector{3, Float32}}, Nothing, Nothing, Vector{Float32}, Vector{Vector{StaticArraysCore.SVector{3, Float32}}}, SciMLBase.ODEProblem{StaticArraysCore.SVector{3, Float32}, Tuple{Float32, Float32}, false, StaticArraysCore.SVector{3, Float32}, SciMLBase.ODEFunction{false, SciMLBase.AutoSpecialize, typeof(Main.lorenz), LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, OrdinaryDiffEq.Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{SciMLBase.ODEFunction{false, SciMLBase.AutoSpecialize, typeof(Main.lorenz), LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Vector{StaticArraysCore.SVector{3, Float32}}, Vector{Float32}, Vector{Vector{StaticArraysCore.SVector{3, Float32}}}, OrdinaryDiffEq.Tsit5ConstantCache}, DiffEqBase.Stats, Nothing}

Taking the Ensemble to the GPU

Now uhh, we just change EnsembleThreads() to EnsembleGPUArray()

sol = solve(monteprob, Tsit5(), EnsembleGPUArray(CUDA.CUDABackend()), trajectories = 10_000, saveat = 1.0f0)
EnsembleSolution Solution of length 10000 with uType:
 SciMLBase.ODESolution{Float32, 2, uType, Nothing, Nothing, Vector{Float32}, rateType, SciMLBase.ODEProblem{StaticArraysCore.SVector{3, Float32}, Tuple{Float32, Float32}, false, StaticArraysCore.SVector{3, Float32}, SciMLBase.ODEFunction{false, SciMLBase.AutoSpecialize, typeof(Main.lorenz), LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, OrdinaryDiffEq.Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, IType, DiffEqBase.Stats, Nothing} where {uType, rateType, IType}

Or for a more efficient version, EnsembleGPUKernel(). But that requires special solvers, so we also change to GPUTsit5().

sol = solve(monteprob, GPUTsit5(), EnsembleGPUKernel(CUDA.CUDABackend()), trajectories = 10_000)
EnsembleSolution Solution of length 10000 with uType:
-SciMLBase.ODESolution{Float32, 2, uType, Nothing, Nothing, tType, Nothing, P, A, IType, Nothing, Nothing} where {uType, tType, P, A, IType}

Okay, so that was anticlimactic, but that's the point: if it were harder than that, it wouldn't be automatic! Now go check out DiffEqGPU.jl's documentation for more details, that's the end of our show.

+SciMLBase.ODESolution{Float32, 2, uType, Nothing, Nothing, tType, Nothing, P, A, IType, Nothing, Nothing} where {uType, tType, P, A, IType}

Okay, so that was anticlimactic, but that's the point: if it were harder than that, it wouldn't be automatic! Now go check out DiffEqGPU.jl's documentation for more details, that's the end of our show.

diff --git a/dev/showcase/missing_physics/index.html b/dev/showcase/missing_physics/index.html index ed7ce09b9aa..242abd5a2ac 100644 --- a/dev/showcase/missing_physics/index.html +++ b/dev/showcase/missing_physics/index.html @@ -224,8 +224,8 @@ \varphi_{1 4} =& u_2^{3} u_1 \\ \varphi_{1 5} =& u_2^{4} \end{align} - \]

Now let's define our DataDrivenProblems for the sparse regressions. To assess the capability of the sparse regression, we will look at 3 cases:

To define the full problem, we need to define a DataDrivenProblem that has the time series of the solution X, the time points of the solution t, and the derivative at each time point of the solution, obtained by the ODE solution's interpolation. We can just use an interpolation to get the derivative:

full_problem = ContinuousDataDrivenProblem(Xₙ, t)
Continuous DataDrivenProblem{Float64} ##DDProblem#191654 in 2 dimensions and 21 samples

Now for the other two symbolic regressions, we are regressing input/outputs of the missing terms, and thus we directly define the datasets as the input/output mappings like:

ideal_problem = DirectDataDrivenProblem(X̂, Ȳ)
-nn_problem = DirectDataDrivenProblem(X̂, Ŷ)
Direct DataDrivenProblem{Float64} ##DDProblem#191656 in 2 dimensions and 41 samples

Let's solve the data-driven problems using sparse regression. We will use the ADMM method, which requires we define a set of shrinking cutoff values λ, and we do this like:

λ = exp10.(-3:0.01:3)
+ \]

Now let's define our DataDrivenProblems for the sparse regressions. To assess the capability of the sparse regression, we will look at 3 cases:

To define the full problem, we need to define a DataDrivenProblem that has the time series of the solution X, the time points of the solution t, and the derivative at each time point of the solution, obtained by the ODE solution's interpolation. We can just use an interpolation to get the derivative:

full_problem = ContinuousDataDrivenProblem(Xₙ, t)
Continuous DataDrivenProblem{Float64} ##DDProblem#198122 in 2 dimensions and 21 samples

Now for the other two symbolic regressions, we are regressing input/outputs of the missing terms, and thus we directly define the datasets as the input/output mappings like:

ideal_problem = DirectDataDrivenProblem(X̂, Ȳ)
+nn_problem = DirectDataDrivenProblem(X̂, Ŷ)
Direct DataDrivenProblem{Float64} ##DDProblem#198124 in 2 dimensions and 41 samples

Let's solve the data-driven problems using sparse regression. We will use the ADMM method, which requires we define a set of shrinking cutoff values λ, and we do this like:

λ = exp10.(-3:0.01:3)
 opt = ADMM(λ)
DataDrivenSparse.ADMM{Vector{Float64}, Float64}([0.001, 0.0010232929922807535, 0.0010471285480508996, 0.001071519305237606, 0.0010964781961431851, 0.001122018454301963, 0.0011481536214968829, 0.001174897554939529, 0.001202264434617413, 0.0012302687708123812  …  812.8305161640995, 831.7637711026708, 851.1380382023768, 870.9635899560806, 891.2509381337459, 912.0108393559096, 933.2543007969915, 954.992586021436, 977.2372209558112, 1000.0], 1.0)

This is one of many methods for sparse regression, consult the DataDrivenDiffEq.jl documentation for more information on the algorithm choices. Taking this, let's solve each of the sparse regressions:

options = DataDrivenCommonOptions(maxiters = 10_000,
                                   normalize = DataNormalization(ZScoreTransform),
                                   selector = bic, digits = 1,
@@ -270,7 +270,7 @@
     println(eqs)
     println(get_parameter_map(eqs))
     println()
-end
Model ##Basis#191657 with 2 equations
+end
Model ##Basis#198125 with 2 equations
 States : u[1] u[2]
 Parameters : 6
 Independent variable: t
@@ -280,7 +280,7 @@
 
 Pair{SymbolicUtils.BasicSymbolic{Real}, Float64}[p₁ => 0.4, p₂ => -0.3, p₃ => 0.1, p₄ => -0.1, p₅ => 0.1, p₆ => -1.0]
 
-Model ##Basis#191661 with 2 equations
+Model ##Basis#198129 with 2 equations
 States : u[1] u[2]
 Parameters : p₁ p₂
 Independent variable: t
@@ -290,7 +290,7 @@
 
 Pair{SymbolicUtils.BasicSymbolic{Real}, Float64}[p₁ => -0.8, p₂ => 0.7]
 
-Model ##Basis#191665 with 2 equations
+Model ##Basis#198133 with 2 equations
 States : u[1] u[2]
 Parameters : p₁ p₂ p₃ p₄
 Independent variable: t
@@ -360,4 +360,4 @@
 annotate!([(1.5, 13, text("Training \nData", 10, :center, :top, :black, "Helvetica"))])
 l = @layout [grid(1, 2)
              grid(1, 1)]
-plot(p1, p2, p3, layout = l)
Example block output +plot(p1, p2, p3, layout = l)
Example block output diff --git a/dev/showcase/ode_types/index.html b/dev/showcase/ode_types/index.html index 21dc05a11f5..60787d67375 100644 --- a/dev/showcase/ode_types/index.html +++ b/dev/showcase/ode_types/index.html @@ -305,4 +305,4 @@ prob = ODEProblem(simplependulum, u₀, tspan) sol = solve(prob, Tsit5(), reltol = 1e-6) -plot(sol.t, getindex.(sol.u, 2), label = "Numerical")Example block output

Warning about Linear Uncertainty Propagation

Measurements.jl uses linear uncertainty propagation, which has an error associated with it. MonteCarloMeasurements.jl has a page which showcases where this method can lead to incorrect uncertainty measurements. Thus for more nonlinear use cases, it's suggested that one uses one of the more powerful UQ methods, such as:

Basically, types can make the algorithm you want to run exceedingly simple to do, but make sure it's the correct algorithm!

+plot(sol.t, getindex.(sol.u, 2), label = "Numerical")Example block output

Warning about Linear Uncertainty Propagation

Measurements.jl uses linear uncertainty propagation, which has an error associated with it. MonteCarloMeasurements.jl has a page which showcases where this method can lead to incorrect uncertainty measurements. Thus for more nonlinear use cases, it's suggested that one uses one of the more powerful UQ methods, such as:

Basically, types can make the algorithm you want to run exceedingly simple to do, but make sure it's the correct algorithm!

diff --git a/dev/showcase/optimization_under_uncertainty/2151916d.svg b/dev/showcase/optimization_under_uncertainty/2151916d.svg new file mode 100644 index 00000000000..38f38f5a29a --- /dev/null +++ b/dev/showcase/optimization_under_uncertainty/2151916d.svg @@ -0,0 +1,150 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/optimization_under_uncertainty/2edab544.svg b/dev/showcase/optimization_under_uncertainty/479b0d2a.svg similarity index 58% rename from dev/showcase/optimization_under_uncertainty/2edab544.svg rename to dev/showcase/optimization_under_uncertainty/479b0d2a.svg index e2a47b7ddfd..c6ea4beeac8 100644 --- a/dev/showcase/optimization_under_uncertainty/2edab544.svg +++ b/dev/showcase/optimization_under_uncertainty/479b0d2a.svg @@ -43,506 +43,506 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/optimization_under_uncertainty/6961aa12.svg b/dev/showcase/optimization_under_uncertainty/6961aa12.svg new file mode 100644 index 00000000000..8bc0a4b6f58 --- /dev/null +++ b/dev/showcase/optimization_under_uncertainty/6961aa12.svg @@ -0,0 +1,150 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/optimization_under_uncertainty/d7121d2c.svg b/dev/showcase/optimization_under_uncertainty/d7121d2c.svg deleted file mode 100644 index 8cfb9d7510f..00000000000 --- a/dev/showcase/optimization_under_uncertainty/d7121d2c.svg +++ /dev/null @@ -1,150 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev/showcase/optimization_under_uncertainty/e1bcaad7.svg b/dev/showcase/optimization_under_uncertainty/dc9b674c.svg similarity index 63% rename from dev/showcase/optimization_under_uncertainty/e1bcaad7.svg rename to dev/showcase/optimization_under_uncertainty/dc9b674c.svg index 29194d5076f..4d28db43f56 100644 --- a/dev/showcase/optimization_under_uncertainty/e1bcaad7.svg +++ b/dev/showcase/optimization_under_uncertainty/dc9b674c.svg @@ -43,506 +43,506 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/showcase/optimization_under_uncertainty/e376804d.svg b/dev/showcase/optimization_under_uncertainty/e376804d.svg deleted file mode 100644 index 4642b0bd592..00000000000 --- a/dev/showcase/optimization_under_uncertainty/e376804d.svg +++ /dev/null @@ -1,150 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dev/showcase/optimization_under_uncertainty/index.html b/dev/showcase/optimization_under_uncertainty/index.html index 4bc84d2a0cc..f9e13029170 100644 --- a/dev/showcase/optimization_under_uncertainty/index.html +++ b/dev/showcase/optimization_under_uncertainty/index.html @@ -52,7 +52,7 @@ scatter!([25], [25], marker = :star, ms = 10, label = nothing, c = :green) plot!(sol, vars = (1, 3), label = nothing, lw = 3, c = :black, ls = :dash) xlims!(0.0, 27.5) -endExample block output

Here, we plot the first 350 Monte Carlo simulations along with the trajectory corresponding to the mean of the distribution (dashed line).

We now wish to compute the expected squared impact distance from the star. This is called an “observation” of our system or an “observable” of interest.

We define this observable as

obs(sol, p) = abs2(sol[3, end] - 25)
obs (generic function with 1 method)

With the observable defined, we can compute the expected squared miss distance from our Monte Carlo simulation results as

mean_ensemble = mean([obs(sol, p) for sol in ensemblesol])
40.19701011207744

Alternatively, we can use the Koopman() algorithm in SciMLExpectations.jl to compute this expectation much more efficiently as

using SciMLExpectations
+end
Example block output

Here, we plot the first 350 Monte Carlo simulations along with the trajectory corresponding to the mean of the distribution (dashed line).

We now wish to compute the expected squared impact distance from the star. This is called an “observation” of our system or an “observable” of interest.

We define this observable as

obs(sol, p) = abs2(sol[3, end] - 25)
obs (generic function with 1 method)

With the observable defined, we can compute the expected squared miss distance from our Monte Carlo simulation results as

mean_ensemble = mean([obs(sol, p) for sol in ensemblesol])
38.08665175925651

Alternatively, we can use the Koopman() algorithm in SciMLExpectations.jl to compute this expectation much more efficiently as

using SciMLExpectations
 gd = GenericDistribution(cor_dist)
 h(x, u, p) = u, [p[1]; x[1]]
 sm = SystemMap(prob, Tsit5(), callback = cbs)
@@ -92,7 +92,7 @@
     scatter!([25], [25], marker = :star, ms = 10, label = nothing, c = :green)
     ylims!(0.0, 50.0)
     xlims!(minx[1], 27.5)
-end
Example block output

Looks pretty good! But, how long did it take? Let's benchmark.

@time solve(opt_prob, optimizer)
u: 3-element Vector{Float64}:
+end
Example block output

Looks pretty good! But, how long did it take? Let's benchmark.

@time solve(opt_prob, optimizer)
u: 3-element Vector{Float64}:
   0.0
   2.4428947026478425
  49.20927899180528

Not bad for bound constrained optimization under uncertainty of a hybrid system!

Probabilistic Constraints

With this approach, we can also consider probabilistic constraints. Let us now consider a wall at $x=20$ with height 25.

constraint = [20.0, 25.0]
@@ -128,7 +128,7 @@
     scatter!([25], [25], marker = :star, ms = 10, label = nothing, c = :green)
     ylims!(0.0, 50.0)
     xlims!(minx[1], 27.5)
-end
Example block output

That doesn't look good!

We now need a second observable for the system. To compute a probability of impact, we use an indicator function for if a trajectory impacts the wall. In other words, this functions returns 1 if the trajectory hits the wall and 0 otherwise.

function constraint_obs(sol, p)
+end
Example block output

That doesn't look good!

We now need a second observable for the system. To compute a probability of impact, we use an indicator function for if a trajectory impacts the wall. In other words, this functions returns 1 if the trajectory hits the wall and 0 otherwise.

function constraint_obs(sol, p)
     sol((constraint[1] - sol[1, 1]) / sol[2, 1])[3] <= constraint[2] ? one(sol[1, end]) :
     zero(sol[1, end])
 end
constraint_obs (generic function with 1 method)

Using the previously computed optimal initial conditions, let's compute the probability of hitting this wall

sm = SystemMap(remake(prob, u0 = make_u0(minx)), Tsit5(), callback = cbs)
@@ -170,4 +170,4 @@
     scatter!([25], [25], marker = :star, ms = 10, label = nothing, c = :green)
     ylims!(0.0, 50.0)
     xlims!(minx[1], 27.5)
-end
Example block output +endExample block output diff --git a/dev/showcase/pinngpu/index.html b/dev/showcase/pinngpu/index.html index baa79e4ee83..c9636a31816 100644 --- a/dev/showcase/pinngpu/index.html +++ b/dev/showcase/pinngpu/index.html @@ -47,18 +47,18 @@ Dense(inner, inner, Lux.σ), Dense(inner, 1)) ps = Lux.setup(Random.default_rng(), chain)[1] -ps = ps |> ComponentArray
ComponentVector{Float32}(layer_1 = (weight = Float32[0.168495 0.36922827 0.3521497; 0.3923745 0.45170864 0.18639652; … ; 0.15425393 -0.13177693 0.155207; 0.24559489 -0.31787905 -0.009146501], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_2 = (weight = Float32[-0.17702325 -0.17150474 … -0.121579416 -0.28588483; -0.11315592 0.30163032 … -0.3412265 0.23782109; … ; -0.30701798 -0.29492706 … -0.010992027 -0.23055129; -0.27509087 0.15519495 … -0.024903756 -0.07276576], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_3 = (weight = Float32[-0.1896281 0.15543921 … -0.19876592 0.09266969; -0.04697081 0.08037529 … -0.026331788 0.14332974; … ; -0.100023635 -0.33507177 … -0.28786263 -0.2714041; 0.07835252 0.33540758 … -0.27549854 -0.29524502], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_4 = (weight = Float32[-0.24556871 -0.0048260586 … 0.006028826 0.10213358; -0.12490856 -0.082745515 … -0.19247425 -0.259311; … ; 0.15512604 0.038415745 … -0.29552966 0.2829067; 0.037604995 -0.12352852 … -0.16023183 -0.20111059], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_5 = (weight = Float32[0.46003082 0.38193703 … 0.17189021 0.43954903], bias = Float32[0.0;;]))

Step 4: Place it on the GPU.

Just plop it on that sucker. We must ensure that our initial parameters for the neural network are on the GPU. If that is done, then the internal computations will all take place on the GPU. This is done by using the gpu function on the initial parameters, like:

ps = ps |> gpu .|> Float64
ComponentVector{Float64}(layer_1 = (weight = [0.16849499940872192 0.3692282736301422 0.35214969515800476; 0.3923744857311249 0.4517086446285248 0.186396524310112; … ; 0.15425392985343933 -0.13177692890167236 0.15520699322223663; 0.24559488892555237 -0.3178790509700775 -0.009146501310169697], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_2 = (weight = [-0.177023246884346 -0.17150473594665527 … -0.12157941609621048 -0.285884827375412; -0.11315591633319855 0.30163031816482544 … -0.3412264883518219 0.2378210872411728; … ; -0.307017982006073 -0.29492706060409546 … -0.010992026887834072 -0.23055128753185272; -0.27509087324142456 0.155194953083992 … -0.024903755635023117 -0.0727657601237297], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_3 = (weight = [-0.18962809443473816 0.15543921291828156 … -0.19876591861248016 0.09266968816518784; -0.04697081074118614 0.0803752914071083 … -0.026331787928938866 0.14332973957061768; … ; -0.10002363473176956 -0.33507177233695984 … -0.287862628698349 -0.27140408754348755; 0.07835251837968826 0.3354075849056244 … -0.27549853920936584 -0.2952450215816498], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_4 = (weight = [-0.24556870758533478 -0.004826058633625507 … 0.006028825882822275 0.10213357955217361; -0.12490855902433395 -0.0827455148100853 … -0.19247424602508545 -0.2593109905719757; … ; 0.15512603521347046 0.03841574490070343 … -0.29552966356277466 0.282906711101532; 0.03760499507188797 -0.12352851778268814 … -0.1602318286895752 -0.2011105865240097], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_5 = (weight = [0.46003082394599915 0.38193702697753906 … 0.17189021408557892 0.4395490288734436], bias = [0.0;;]))

Step 5: Discretize the PDE via a PINN Training Strategy

strategy = GridTraining(0.05)
+ps = ps |> ComponentArray
ComponentVector{Float32}(layer_1 = (weight = Float32[0.2846045 0.4626125 0.35224664; -0.210325 -0.31796068 -0.2070085; … ; -0.18438643 -0.4140816 0.33901262; -0.33640644 0.29581606 -0.14560589], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_2 = (weight = Float32[0.0037049737 -0.15865514 … -0.25880608 0.33901015; 0.12599942 0.13703315 … -0.16942821 0.2259272; … ; -0.1685831 0.26894385 … -0.24228524 -0.074528985; 0.06728649 -0.13477182 … -0.34034616 0.25224003], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_3 = (weight = Float32[-0.04346889 -0.20687875 … 0.17856917 0.10428816; -0.31185412 0.32930806 … 0.3097842 -0.016553968; … ; 0.14109649 -0.18740183 … 0.17585425 -0.17068012; 0.023780316 0.120727 … -0.31742254 0.19054234], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_4 = (weight = Float32[0.12265144 0.1491264 … 0.32562742 -0.08398975; -0.26046348 0.22796369 … 0.3384291 -0.011929554; … ; -0.20985657 0.33481205 … -0.018651646 -0.181803; 0.02474039 -0.16410148 … -0.015463937 0.08654452], bias = Float32[0.0; 0.0; … ; 0.0; 0.0;;]), layer_5 = (weight = Float32[0.25059775 -0.30819434 … -0.38613957 -0.46180046], bias = Float32[0.0;;]))

Step 4: Place it on the GPU.

Just plop it on that sucker. We must ensure that our initial parameters for the neural network are on the GPU. If that is done, then the internal computations will all take place on the GPU. This is done by using the gpu function on the initial parameters, like:

ps = ps |> gpu .|> Float64
ComponentVector{Float64}(layer_1 = (weight = [0.2846044898033142 0.462612509727478 0.35224664211273193; -0.21032500267028809 -0.3179606795310974 -0.207008495926857; … ; -0.18438643217086792 -0.4140816032886505 0.33901262283325195; -0.3364064395427704 0.2958160638809204 -0.1456058919429779], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_2 = (weight = [0.003704973729327321 -0.15865513682365417 … -0.2588060796260834 0.3390101492404938; 0.12599942088127136 0.137033149600029 … -0.16942821443080902 0.2259272038936615; … ; -0.16858309507369995 0.2689438462257385 … -0.24228523671627045 -0.07452898472547531; 0.06728649139404297 -0.13477182388305664 … -0.34034615755081177 0.25224003195762634], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_3 = (weight = [-0.043468888849020004 -0.20687875151634216 … 0.17856916785240173 0.10428816080093384; -0.31185412406921387 0.32930806279182434 … 0.3097842037677765 -0.01655396819114685; … ; 0.14109648764133453 -0.18740183115005493 … 0.17585425078868866 -0.17068012058734894; 0.02378031611442566 0.12072700262069702 … -0.3174225389957428 0.1905423402786255], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_4 = (weight = [0.12265144288539886 0.14912639558315277 … 0.3256274163722992 -0.08398974686861038; -0.2604634761810303 0.22796368598937988 … 0.3384290933609009 -0.011929553933441639; … ; -0.2098565697669983 0.3348120450973511 … -0.018651645630598068 -0.18180300295352936; 0.024740390479564667 -0.1641014814376831 … -0.015463937073946 0.08654452115297318], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_5 = (weight = [0.25059774518013 -0.3081943392753601 … -0.38613957166671753 -0.4618004560470581], bias = [0.0;;]))

Step 5: Discretize the PDE via a PINN Training Strategy

strategy = GridTraining(0.05)
 discretization = PhysicsInformedNN(chain,
                                    strategy,
                                    init_params = ps)
 prob = discretize(pde_system, discretization)
OptimizationProblem. In-place: true
-u0: ComponentVector{Float64}(layer_1 = (weight = [0.16849499940872192 0.3692282736301422 0.35214969515800476; 0.3923744857311249 0.4517086446285248 0.186396524310112; … ; 0.15425392985343933 -0.13177692890167236 0.15520699322223663; 0.24559488892555237 -0.3178790509700775 -0.009146501310169697], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_2 = (weight = [-0.177023246884346 -0.17150473594665527 … -0.12157941609621048 -0.285884827375412; -0.11315591633319855 0.30163031816482544 … -0.3412264883518219 0.2378210872411728; … ; -0.307017982006073 -0.29492706060409546 … -0.010992026887834072 -0.23055128753185272; -0.27509087324142456 0.155194953083992 … -0.024903755635023117 -0.0727657601237297], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_3 = (weight = [-0.18962809443473816 0.15543921291828156 … -0.19876591861248016 0.09266968816518784; -0.04697081074118614 0.0803752914071083 … -0.026331787928938866 0.14332973957061768; … ; -0.10002363473176956 -0.33507177233695984 … -0.287862628698349 -0.27140408754348755; 0.07835251837968826 0.3354075849056244 … -0.27549853920936584 -0.2952450215816498], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_4 = (weight = [-0.24556870758533478 -0.004826058633625507 … 0.006028825882822275 0.10213357955217361; -0.12490855902433395 -0.0827455148100853 … -0.19247424602508545 -0.2593109905719757; … ; 0.15512603521347046 0.03841574490070343 … -0.29552966356277466 0.282906711101532; 0.03760499507188797 -0.12352851778268814 … -0.1602318286895752 -0.2011105865240097], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_5 = (weight = [0.46003082394599915 0.38193702697753906 … 0.17189021408557892 0.4395490288734436], bias = [0.0;;]))

Step 6: Solve the Optimization Problem

callback = function (p, l)
+u0: ComponentVector{Float64}(layer_1 = (weight = [0.2846044898033142 0.462612509727478 0.35224664211273193; -0.21032500267028809 -0.3179606795310974 -0.207008495926857; … ; -0.18438643217086792 -0.4140816032886505 0.33901262283325195; -0.3364064395427704 0.2958160638809204 -0.1456058919429779], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_2 = (weight = [0.003704973729327321 -0.15865513682365417 … -0.2588060796260834 0.3390101492404938; 0.12599942088127136 0.137033149600029 … -0.16942821443080902 0.2259272038936615; … ; -0.16858309507369995 0.2689438462257385 … -0.24228523671627045 -0.07452898472547531; 0.06728649139404297 -0.13477182388305664 … -0.34034615755081177 0.25224003195762634], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_3 = (weight = [-0.043468888849020004 -0.20687875151634216 … 0.17856916785240173 0.10428816080093384; -0.31185412406921387 0.32930806279182434 … 0.3097842037677765 -0.01655396819114685; … ; 0.14109648764133453 -0.18740183115005493 … 0.17585425078868866 -0.17068012058734894; 0.02378031611442566 0.12072700262069702 … -0.3174225389957428 0.1905423402786255], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_4 = (weight = [0.12265144288539886 0.14912639558315277 … 0.3256274163722992 -0.08398974686861038; -0.2604634761810303 0.22796368598937988 … 0.3384290933609009 -0.011929553933441639; … ; -0.2098565697669983 0.3348120450973511 … -0.018651645630598068 -0.18180300295352936; 0.024740390479564667 -0.1641014814376831 … -0.015463937073946 0.08654452115297318], bias = [0.0; 0.0; … ; 0.0; 0.0;;]), layer_5 = (weight = [0.25059774518013 -0.3081943392753601 … -0.38613957166671753 -0.4618004560470581], bias = [0.0;;]))

Step 6: Solve the Optimization Problem

callback = function (p, l)
     println("Current loss is: $l")
     return false
 end
 
-res = Optimization.solve(prob, Adam(0.01); callback = callback, maxiters = 2500);
u: ComponentVector{Float64}(layer_1 = (weight = [-1.6554145297135205 -0.1798758511642023 -0.2066233919148985; 2.9513541171762854 0.3793222333097972 0.3589231810104084; … ; 0.8201329601564367 0.8428368854859324 1.073244554624327; 2.7565277362460576 -0.013536064691180648 0.12599278373139847], bias = [0.26964452234530484; -0.6233313079135531; … ; 0.00426921566241296; -1.8795428663605247;;]), layer_2 = (weight = [0.4380616389869381 -1.2362929302593582 … -0.27567150570349835 -0.373127101897352; 0.8656428722940742 -0.47677963737021906 … -0.5760427686256318 -0.6619596763655837; … ; -1.1538818680440837 0.3733112521318252 … 0.02687130098747139 -0.41232698061147177; -0.09562696920247252 0.9067358545432386 … -0.03956965116580724 -0.5218264552189588], bias = [0.30658932554926005; -0.30971775805785706; … ; 0.0521964805688944; 0.22057748213398645;;]), layer_3 = (weight = [0.048698572154183745 0.6379424514044416 … -4.510280556259975 -0.636920263152692; -1.133723726088965 -0.2794303935247712 … -1.8474875671067272 -1.624845168672262; … ; -0.019691165999940127 0.19407567560098593 … -2.212463984770261 -1.6702550636975246; -0.05756315295374824 0.9130431768029524 … -2.3097843345239113 -1.425788048787491], bias = [0.05543042816851054; -0.030208122424790002; … ; -0.14090333206305344; 0.054052396115657236;;]), layer_4 = (weight = [-0.7420843917671103 -1.244443565150813 … -0.5028502170750929 -0.6065874222127688; -0.8881570265535546 -1.6633703287456951 … -1.1570361872669626 -1.051925836990851; … ; -0.4533888162055985 -1.5491554486326107 … -1.249228740683466 -0.4765193641602929; -0.4331486235277653 -1.5190637671416434 … -0.8466838586301554 -0.8318455444802075], bias = [1.2770756859876964; 0.9982879198237775; … ; 0.9391271777288901; 0.99866575934183;;]), layer_5 = (weight = [4.403621692234441 5.11554179018082 … 4.440141978496844 4.730340967097789], bias = [0.9360646794165361;;]))

We then use the remake function to rebuild the PDE problem to start a new optimization at the optimized parameters, and continue with a lower learning rate:

prob = remake(prob, u0 = res.u)
-res = Optimization.solve(prob, Adam(0.001); callback = callback, maxiters = 2500);
u: ComponentVector{Float64}(layer_1 = (weight = [-1.5062229082324834 -0.14797582447719582 -0.14609241251078678; 3.0734663477070967 0.42388539653008533 0.3311830382595779; … ; 0.4361094270721975 0.6164381698193994 1.114185607026043; 2.6437535219571835 -0.019234153842594404 0.15401133077159396], bias = [0.35696366940913044; -0.6255173191526554; … ; -0.11843034141502841; -2.080375832099964;;]), layer_2 = (weight = [0.46765326586199274 -1.2918109225018954 … -0.30318976739650405 -0.23712220094704756; 0.9816605713668661 -0.417295030054861 … -0.5364515813100952 -0.7891799171263015; … ; -1.0679052478313777 0.3875259972858228 … 0.0398615709150352 -0.5447272728294701; -0.07003226607092439 0.9003234420459449 … -0.046574814677672105 -0.5632968505394537], bias = [0.30174304536086627; -0.23389156837462358; … ; 0.06586548242447984; 0.21886801556181262;;]), layer_3 = (weight = [0.12738082046489704 0.32932750912663367 … -4.791841987936922 -0.5331516321937507; -1.458823481757558 -0.541756998842781 … -0.4609760802821149 -3.308501754906163; … ; -0.552704740985188 -0.4105516977841391 … -1.7432632970143052 -2.8086251831881883; -0.5012111238923378 0.22806919338552814 … -2.4726924262517866 -1.4130712675392503], bias = [0.14420838542188438; -0.8937796896520792; … ; -0.4973054187516959; 0.11074797727345934;;]), layer_4 = (weight = [-1.3403788354120367 1.4381374211503437 … 2.0692984272797514 0.2040080302093945; -1.005661594838709 -1.9232596735230543 … -1.2843042007297936 -1.2625417409209996; … ; -0.4381379715166813 -1.789310593678225 … -1.3563708445234393 -0.6200879497839351; -0.5425922169353402 -2.201593680362791 … -1.650136900408549 -0.566582615070083], bias = [1.0685132190002258; 0.7990514994481983; … ; 0.7819739716533269; 0.9942071741195916;;]), layer_5 = (weight = [5.877311024076123 5.291744297623082 … 4.558480743822748 4.741339371843404], bias = [1.7276095658272683;;]))

Step 7: Inspect the PINN's Solution

Finally, we inspect the solution:

phi = discretization.phi
+res = Optimization.solve(prob, Adam(0.01); callback = callback, maxiters = 2500);
u: ComponentVector{Float64}(layer_1 = (weight = [1.9568436384913166 0.9352000603918599 0.926928887258572; -1.233217787349399 -1.2720784190088168 -1.2552886952371636; … ; -3.353617176173383 0.5571922753783344 0.5678197891049207; 2.2629220149993294 0.7857451671941419 0.5589943918944495], bias = [-0.9257573033742932; 0.7596623355480198; … ; -1.7689892306079456; -0.5062586744525331;;]), layer_2 = (weight = [0.21867608010183223 -0.1705219038537738 … -2.447648590669474 0.41972587852008764; -0.524571047601292 1.456886815441886 … 1.3601261065180883 -0.31872693531489027; … ; -0.3183262081364054 2.8612875609777157 … 1.4822172145918542 -0.568851905402844; -0.5837146403987938 1.2995784395384726 … -0.12697074781669687 -0.3757769850832683], bias = [-0.15031844515432938; 0.1361349163723271; … ; 0.5178456369612462; 0.14439486950308808;;]), layer_3 = (weight = [0.1063714712450918 -0.8447008762965669 … 1.7692224681205537 0.9108125320780537; -0.05465507136950013 0.5789784188509567 … 0.9763846952829217 -0.22688002145409578; … ; 0.2648870981136484 -1.1025750842662951 … 0.8673875362391695 -0.585746961406695; 0.2407147140821442 -0.35014595323176156 … 0.6002839419081641 2.0643283309527996], bias = [-0.08432509339153067; 0.25282578673187556; … ; -0.28193326409149044; -0.10750559168162689;;]), layer_4 = (weight = [-1.7320775345641128 1.5200890658697748 … -2.142872032571841 0.869392118139181; 0.6486586562135914 -0.9191739836374284 … 0.9231355674862283 0.31070582940954206; … ; 0.800855926499982 -1.145999839247983 … 0.6432622891529114 -0.10184988611402056; 0.8020871202099161 -1.3375985653981033 … 0.8901869365786744 0.1513909719474414], bias = [1.0519124593974098; -1.0079786957504568; … ; -1.1565976297222098; -0.9760697579072701;;]), layer_5 = (weight = [6.823046448405784 -2.888353471614459 … -2.5527200812459654 -3.2232520765431096], bias = [1.0801111265502792;;]))

We then use the remake function to rebuild the PDE problem to start a new optimization at the optimized parameters, and continue with a lower learning rate:

prob = remake(prob, u0 = res.u)
+res = Optimization.solve(prob, Adam(0.001); callback = callback, maxiters = 2500);
u: ComponentVector{Float64}(layer_1 = (weight = [2.0135850997021763 0.9587759454191301 0.9355573293122602; -0.8777578255360221 -0.8593567746082719 -0.8317138518890145; … ; -3.5594109723082394 0.47253459422798333 0.47910317265404423; 2.367239907206683 0.8050702734729074 0.6703960316964171], bias = [-1.1837490638356796; 1.7713485431385103; … ; -1.8545859357853867; -0.6312634511256747;;]), layer_2 = (weight = [0.21784217800381864 -0.43452573841248454 … -2.7005384641351053 0.4217274967295832; -0.5442485134689353 1.0948792238497447 … 1.2079415152249293 -0.33573404477724456; … ; -0.3624780185991143 5.125859848669328 … 1.0009080727503092 -0.6114752966559314; -0.6768062906550752 1.7042495558328767 … -0.23041029443574454 -0.459373980900094], bias = [-0.16616686608270745; 0.1240363412351296; … ; 0.4806338976554095; 0.0902778543252487;;]), layer_3 = (weight = [-0.019865483296844184 -0.9284545975440724 … 1.8160285725364391 1.0826783742817108; 0.03680191738856431 0.6213141694037566 … 0.9652799579065268 -0.386334134405294; … ; 0.29334661094623043 -1.112448273975369 … 0.8854704419015216 -0.7869564811015286; 0.4041196355607152 -0.3003272548013927 … 0.9998704213845416 1.647945543431292], bias = [-0.20997576068929616; 0.34852601669853694; … ; -0.27795027303753456; -0.04698270289622955;;]), layer_4 = (weight = [-2.1086342298366776 2.1104469663323577 … -2.594939402351366 1.5589004205205832; 0.6310725264834314 -0.892213817619556 … 1.0679478827330657 0.20737979111317048; … ; 0.9715980207477212 -1.1334866098400462 … 0.6742221069751304 -0.3406147010894299; 0.853751977845453 -1.3069612062499198 … 1.2160432104355892 -0.022605877862942176], bias = [1.6452868232847004; -0.9786357956987032; … ; -1.1421680677622996; -0.9529173038148525;;]), layer_5 = (weight = [9.332959762800908 -3.7193506156245877 … -3.2565754651568555 -4.162257592533208], bias = [0.6081804371821448;;]))

Step 7: Inspect the PINN's Solution

Finally, we inspect the solution:

phi = discretization.phi
 ts, xs, ys = [infimum(d.domain):0.1:supremum(d.domain) for d in domains]
 u_real = [analytic_sol_func(t, x, y) for t in ts for x in xs for y in ys]
 u_predict = [first(Array(phi(gpu([t, x, y]), res.u))) for t in ts for x in xs for y in ys]
@@ -83,4 +83,4 @@
     gif(anim, "3pde.gif", fps = 10)
 end
 
-plot_(res)

3pde

+plot_(res)

3pde

diff --git a/dev/showcase/showcase/index.html b/dev/showcase/showcase/index.html index eb5078bd357..df4bcd74da2 100644 --- a/dev/showcase/showcase/index.html +++ b/dev/showcase/showcase/index.html @@ -1,2 +1,2 @@ -The SciML Showcase · Overview of Julia's SciML

The SciML Showcase

The SciML Showcase is a display of some cool things that can be done by connecting SciML software.

Note

The SciML Showcase is not meant to be training/tutorials, but inspirational demonstrations! If you're looking for simple examples to get started with, check out the getting started section.

Want to see some cool things that you can do with SciML? Check out the following:

+The SciML Showcase · Overview of Julia's SciML

The SciML Showcase

The SciML Showcase is a display of some cool things that can be done by connecting SciML software.

Note

The SciML Showcase is not meant to be training/tutorials, but inspirational demonstrations! If you're looking for simple examples to get started with, check out the getting started section.

Want to see some cool things that you can do with SciML? Check out the following:

diff --git a/dev/showcase/symbolic_analysis/index.html b/dev/showcase/symbolic_analysis/index.html index 43204b89fa5..e690b82fa76 100644 --- a/dev/showcase/symbolic_analysis/index.html +++ b/dev/showcase/symbolic_analysis/index.html @@ -175,4 +175,4 @@ funcs_to_check = to_check, p = 0.9) # Dict{Num, Symbol} with 2 entries: # b => :globally -# c => :globally

Both parameters b, c are globally identifiable with probability 0.9 in this case.

+# c => :globally

Both parameters b, c are globally identifiable with probability 0.9 in this case.