Commit dc8d7e6
Merge branch 'main' of github.com:SciML/LinearSolve.jl into ap/adjoint
avik-pal committed Feb 24, 2024
2 parents 06c09a3 + a206054
Showing 47 changed files with 971 additions and 748 deletions.
1 change: 1 addition & 0 deletions .JuliaFormatter.toml
@@ -1,3 +1,4 @@
 style = "sciml"
 format_markdown = true
+format_docstrings = true
 annotate_untyped_fields_with_any = false
2 changes: 1 addition & 1 deletion .github/dependabot.yml
@@ -7,4 +7,4 @@ updates:
       interval: "weekly"
     ignore:
       - dependency-name: "crate-ci/typos"
-        update-types: ["version-update:semver-patch"]
+        update-types: ["version-update:semver-patch", "version-update:semver-minor"]
2 changes: 1 addition & 1 deletion .github/workflows/CI.yml
@@ -32,7 +32,7 @@ jobs:
       - uses: julia-actions/setup-julia@v1
         with:
           version: ${{ matrix.version }}
-      - uses: actions/cache@v3
+      - uses: actions/cache@v4
         env:
           cache-name: cache-artifacts
         with:
31 changes: 31 additions & 0 deletions .github/workflows/Downgrade.yml
@@ -0,0 +1,31 @@
+name: Downgrade
+on:
+  pull_request:
+    branches:
+      - main
+    paths-ignore:
+      - 'docs/**'
+  push:
+    branches:
+      - main
+    paths-ignore:
+      - 'docs/**'
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        version: ['1']
+        group:
+          - Core
+    steps:
+      - uses: actions/checkout@v4
+      - uses: julia-actions/setup-julia@v1
+        with:
+          version: ${{ matrix.version }}
+      - uses: julia-actions/julia-downgrade-compat@v1
+        # if: ${{ matrix.version == '1.6' }}
+        with:
+          skip: Pkg,TOML
+      - uses: julia-actions/julia-buildpkg@v1
+      - uses: julia-actions/julia-runtest@v1
6 changes: 5 additions & 1 deletion .github/workflows/Downstream.yml
@@ -12,6 +12,7 @@ jobs:
     env:
       GROUP: ${{ matrix.package.group }}
     strategy:
+      fail-fast: false
       matrix:
         julia-version: [1]
         os: [ubuntu-latest]
@@ -20,7 +21,7 @@
           - {user: SciML, repo: ModelingToolkit.jl, group: All}
           - {user: SciML, repo: SciMLSensitivity.jl, group: Core1}
           - {user: SciML, repo: BoundaryValueDiffEq.jl, group: All}
-
+          - {user: SciML, repo: NonlinearSolve.jl, group: All}
     steps:
       - uses: actions/checkout@v4
       - uses: julia-actions/setup-julia@v1
@@ -50,6 +51,9 @@ jobs:
             @info "Not compatible with this release. No problem." exception=err
             exit(0) # Exit immediately, as a success
           end
+        env:
+          RETESTITEMS_NWORKERS: 4
+          RETESTITEMS_NWORKER_THREADS: 2
       - uses: julia-actions/julia-processcoverage@v1
       - uses: codecov/codecov-action@v3
         with:
2 changes: 1 addition & 1 deletion .github/workflows/SpellCheck.yml
@@ -10,4 +10,4 @@ jobs:
       - name: Checkout Actions Repository
         uses: actions/checkout@v4
       - name: Check spelling
-        uses: crate-ci/typos@v1.16.23
+        uses: crate-ci/typos@v1.18.0
62 changes: 31 additions & 31 deletions Project.toml
@@ -1,7 +1,7 @@
 name = "LinearSolve"
 uuid = "7ed4a6bd-45f5-4d41-b270-4a48e9bafcae"
 authors = ["SciML"]
-version = "2.22.0"
+version = "2.25.0"

 [deps]
 ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
@@ -22,7 +22,6 @@ PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a"
 Preferences = "21216c6a-2e73-6563-6e65-726566657250"
 RecursiveFactorization = "f2c3362d-daeb-58d1-803e-2bc74f2840b4"
 Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
-Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
 SciMLOperators = "c0aeaf25-5076-4817-a8d5-81caf7dfa961"
 Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
@@ -63,53 +62,53 @@ LinearSolveRecursiveArrayToolsExt = "RecursiveArrayTools"
 [compat]
 AllocCheck = "0.1"
 Aqua = "0.8"
-ArrayInterface = "7.4.11"
-BandedMatrices = "1"
-BlockDiagonals = "0.1"
+ArrayInterface = "7.7"
+BandedMatrices = "1.5"
+BlockDiagonals = "0.1.42"
 CUDA = "5"
-ConcreteStructs = "0.2"
-DocStringExtensions = "0.9"
-EnumX = "1"
-Enzyme = "0.11"
-EnzymeCore = "0.6"
+ConcreteStructs = "0.2.3"
+DocStringExtensions = "0.9.3"
+EnumX = "1.0.4"
+Enzyme = "0.11.15"
+EnzymeCore = "0.6.5"
 FastAlmostBandedMatrices = "0.1"
 FastLapackInterface = "2"
-FiniteDiff = "2"
-ForwardDiff = "0.10"
-GPUArraysCore = "0.1"
+FiniteDiff = "2.22"
+ForwardDiff = "0.10.36"
+GPUArraysCore = "0.1.6"
 HYPRE = "1.4.0"
-InteractiveUtils = "1.6"
+InteractiveUtils = "1.10"
 IterativeSolvers = "0.9.3"
-JET = "0.8"
-KLU = "0.3.0, 0.4"
-KernelAbstractions = "0.9"
+JET = "0.8.28"
+KLU = "0.5"
+KernelAbstractions = "0.9.16"
 Krylov = "0.9"
 KrylovKit = "0.6"
-Libdl = "1.6"
-LinearAlgebra = "1.9"
+Libdl = "1.10"
+LinearAlgebra = "1.10"
 MPI = "0.20"
 Metal = "0.5"
 MultiFloats = "1"
 Pardiso = "0.5"
 Pkg = "1"
-PrecompileTools = "1"
-Preferences = "1"
+PrecompileTools = "1.2"
+Preferences = "1.4"
 Random = "1"
-RecursiveArrayTools = "2, 3"
-RecursiveFactorization = "0.2.8"
+RecursiveArrayTools = "3.8"
+RecursiveFactorization = "0.2.14"
 Reexport = "1"
-Requires = "1"
 SafeTestsets = "0.1"
-SciMLBase = "2"
-SciMLOperators = "0.3"
+SciMLBase = "2.23.0"
+SciMLOperators = "0.3.7"
 Setfield = "1"
-SparseArrays = "1.9"
+SparseArrays = "1.10"
 Sparspak = "0.3.6"
-StaticArrays = "1"
-StaticArraysCore = "1"
+StableRNGs = "1"
+StaticArrays = "1.5"
+StaticArraysCore = "1.4.2"
 Test = "1"
 UnPack = "1"
-julia = "1.9"
+julia = "1.10"

 [extras]
 AllocCheck = "9b6a8646-10ed-4001-bbdc-1d2f46dfbb1a"
@@ -133,8 +132,9 @@ Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 RecursiveArrayTools = "731186ca-8d62-57ce-b412-fbd966d074cd"
 SafeTestsets = "1bc83da4-3b8d-516f-aca4-4fe02f6d838f"
+StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
 StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

 [targets]
-test = ["Aqua", "Test", "IterativeSolvers", "InteractiveUtils", "JET", "KrylovKit", "Pkg", "Random", "SafeTestsets", "MultiFloats", "ForwardDiff", "HYPRE", "MPI", "BlockDiagonals", "Enzyme", "FiniteDiff", "BandedMatrices", "FastAlmostBandedMatrices", "StaticArrays", "AllocCheck"]
+test = ["Aqua", "Test", "IterativeSolvers", "InteractiveUtils", "JET", "KrylovKit", "Pkg", "Random", "SafeTestsets", "MultiFloats", "ForwardDiff", "HYPRE", "MPI", "BlockDiagonals", "Enzyme", "FiniteDiff", "BandedMatrices", "FastAlmostBandedMatrices", "StaticArrays", "AllocCheck", "StableRNGs"]
2 changes: 1 addition & 1 deletion benchmarks/applelu.jl
@@ -23,7 +23,7 @@ algs = [
     GenericLUFactorization(),
     RFLUFactorization(),
     AppleAccelerateLUFactorization(),
-    MetalLUFactorization(),
+    MetalLUFactorization()
 ]
 res = [Float32[] for i in 1:length(algs)]

2 changes: 1 addition & 1 deletion benchmarks/lu.jl
@@ -24,7 +24,7 @@ algs = [
     RFLUFactorization(),
     MKLLUFactorization(),
     FastLUFactorization(),
-    SimpleLUFactorization(),
+    SimpleLUFactorization()
 ]
 res = [Float64[] for i in 1:length(algs)]

5 changes: 3 additions & 2 deletions benchmarks/sparselu.jl
@@ -36,7 +36,7 @@ algs = [
     UMFPACKFactorization(),
     KLUFactorization(),
     MKLPardisoFactorize(),
-    SparspakFactorization(),
+    SparspakFactorization()
 ]
 cols = [:red, :blue, :green, :magenta, :turqoise] # one color per alg
 lst = [:dash, :solid, :dashdot] # one line style per dim
@@ -65,7 +65,8 @@ function run_and_plot(; dims = [1, 2, 3], kmax = 12)
         u0 = rand(rng, n)

         for j in 1:length(algs)
-            bt = @belapsed solve(prob, $(algs[j])).u setup=(prob = LinearProblem(copy($A),
+            bt = @belapsed solve(prob, $(algs[j])).u setup=(prob = LinearProblem(
+                copy($A),
                 copy($b);
                 u0 = copy($u0),
                 alias_A = true,
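Aside: the `setup` above rebuilds the `LinearProblem` for every sample, and `alias_A = true` lets the solver overwrite the copied matrix instead of duplicating it again. For orientation, a minimal sketch of the sparse-solve API this benchmark times (matrix and sizes are illustrative, not from the benchmark):

using LinearSolve, SparseArrays, LinearAlgebra

n = 100
A = sprand(n, n, 0.05) + 10I            # random sparse matrix, made diagonally dominant
b = rand(n)
prob = LinearProblem(A, b)
sol = solve(prob, KLUFactorization())   # or UMFPACKFactorization(), SparspakFactorization()
sol.u                                   # the computed solution vector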
6 changes: 3 additions & 3 deletions docs/pages.jl
@@ -2,14 +2,14 @@

 pages = ["index.md",
     "Tutorials" => Any["tutorials/linear.md"
-        "tutorials/caching_interface.md"],
+                       "tutorials/caching_interface.md"],
     "Basics" => Any["basics/LinearProblem.md",
         "basics/common_solver_opts.md",
         "basics/OperatorAssumptions.md",
         "basics/Preconditioners.md",
         "basics/FAQ.md"],
     "Solvers" => Any["solvers/solvers.md"],
     "Advanced" => Any["advanced/developing.md"
-        "advanced/custom.md"],
-    "Release Notes" => "release_notes.md",
+                      "advanced/custom.md"],
+    "Release Notes" => "release_notes.md"
 ]
2 changes: 1 addition & 1 deletion docs/src/advanced/developing.md
@@ -18,7 +18,7 @@ basic machinery. A simplified version is:
 struct MyLUFactorization{P} <: SciMLBase.AbstractLinearAlgorithm end

 function init_cacheval(alg::MyLUFactorization, A, b, u, Pl, Pr, maxiters, abstol, reltol,
-    verbose)
+        verbose)
     lu!(convert(AbstractMatrix, A))
 end
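For context, the docs page touched here pairs `init_cacheval` with a `SciMLBase.solve!` overload that reuses the cached factorization. A sketch of that companion method (for orientation only; the body shown is illustrative, not part of this diff):

function SciMLBase.solve!(cache::LinearSolve.LinearCache, alg::MyLUFactorization; kwargs...)
    if cache.isfresh    # refactorize only when A has changed
        cache.cacheval = lu!(convert(AbstractMatrix, cache.A))
        cache.isfresh = false
    end
    y = ldiv!(cache.u, cache.cacheval, cache.b)    # solve against the cached LU
    SciMLBase.build_linear_solution(alg, y, nothing, cache)
end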
14 changes: 10 additions & 4 deletions docs/src/basics/Preconditioners.md
@@ -83,13 +83,13 @@ The following preconditioners match the interface of LinearSolve.jl.
   - [Preconditioners.CholeskyPreconditioner(A, i)](https://github.com/JuliaLinearAlgebra/Preconditioners.jl):
     An incomplete Cholesky preconditioner with cut-off level `i`. Requires `A` as
     a `AbstractMatrix` and positive semi-definite.
-  - [AlgebraicMultiGrid](https://github.com/JuliaLinearAlgebra/AlgebraicMultigrid.jl):
+  - [AlgebraicMultigrid](https://github.com/JuliaLinearAlgebra/AlgebraicMultigrid.jl):
     Implementations of the algebraic multigrid method. Must be converted to a
-    preconditioner via `AlgebraicMultiGrid.aspreconditioner(AlgebraicMultiGrid.precmethod(A))`.
+    preconditioner via `AlgebraicMultigrid.aspreconditioner(AlgebraicMultigrid.precmethod(A))`.
     Requires `A` as a `AbstractMatrix`. Provides the following methods:

-      + `AlgebraicMultiGrid.ruge_stuben(A)`
-      + `AlgebraicMultiGrid.smoothed_aggregation(A)`
+      + `AlgebraicMultigrid.ruge_stuben(A)`
+      + `AlgebraicMultigrid.smoothed_aggregation(A)`
   - [PyAMG](https://github.com/cortner/PyAMG.jl):
     Implementations of the algebraic multigrid method. Must be converted to a
     preconditioner via `PyAMG.aspreconditioner(PyAMG.precmethod(A))`.
@@ -111,3 +111,9 @@ The following preconditioners match the interface of LinearSolve.jl.
   preconditioners which supports distributed computing via MPI. These can be
   written using the LinearSolve.jl interface choosing algorithms like `HYPRE.ILU`
   and `HYPRE.BoomerAMG`.
+  - [KrylovPreconditioners.jl](https://github.com/JuliaSmoothOptimizers/KrylovPreconditioners.jl/): Provides GPU-ready
+    preconditioners via KernelAbstractions.jl. At the time of writing the package provides the following methods:
+
+      + Incomplete Cholesky decomposition `KrylovPreconditioners.kp_ic0(A)`
+      + Incomplete LU decomposition `KrylovPreconditioners.kp_ilu0(A)`
+      + Block Jacobi `KrylovPreconditioners.BlockJacobiPreconditioner(A, nblocks, device)`
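A minimal usage sketch for the AlgebraicMultigrid entry above (the tridiagonal test matrix is illustrative; `ruge_stuben`, `aspreconditioner`, and the `Pl` keyword are the documented interface):

using LinearSolve, AlgebraicMultigrid, SparseArrays

n = 100
A = spdiagm(-1 => fill(-1.0, n - 1), 0 => fill(2.0, n), 1 => fill(-1.0, n - 1))  # SPD model problem
b = rand(n)
pl = AlgebraicMultigrid.aspreconditioner(AlgebraicMultigrid.ruge_stuben(A))
sol = solve(LinearProblem(A, b), KrylovJL_GMRES(), Pl = pl)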
11 changes: 6 additions & 5 deletions ext/LinearSolveBandedMatricesExt.jl
@@ -2,7 +2,8 @@ module LinearSolveBandedMatricesExt

 using BandedMatrices, LinearAlgebra, LinearSolve
 import LinearSolve: defaultalg,
-    do_factorization, init_cacheval, DefaultLinearSolver, DefaultAlgorithmChoice
+    do_factorization, init_cacheval, DefaultLinearSolver,
+    DefaultAlgorithmChoice

 # Defaults for BandedMatrices
 function defaultalg(A::BandedMatrix, b, oa::OperatorAssumptions{Bool})
@@ -35,14 +36,14 @@ for alg in (:SVDFactorization, :MKLLUFactorization, :DiagonalFactorization,
         :AppleAccelerateLUFactorization, :CholeskyFactorization)
     @eval begin
         function init_cacheval(::$(alg), ::BandedMatrix, b, u, Pl, Pr, maxiters::Int,
-            abstol, reltol, verbose::Bool, assumptions::OperatorAssumptions)
+                abstol, reltol, verbose::Bool, assumptions::OperatorAssumptions)
             return nothing
         end
     end
 end

 function init_cacheval(::LUFactorization, A::BandedMatrix, b, u, Pl, Pr, maxiters::Int,
-    abstol, reltol, verbose::Bool, assumptions::OperatorAssumptions)
+        abstol, reltol, verbose::Bool, assumptions::OperatorAssumptions)
     return lu(similar(A, 0, 0))
 end

@@ -54,8 +55,8 @@ for alg in (:SVDFactorization, :MKLLUFactorization, :DiagonalFactorization,
         :AppleAccelerateLUFactorization, :QRFactorization, :LUFactorization)
     @eval begin
         function init_cacheval(::$(alg), ::Symmetric{<:Number, <:BandedMatrix}, b, u, Pl,
-            Pr, maxiters::Int, abstol, reltol, verbose::Bool,
-            assumptions::OperatorAssumptions)
+                Pr, maxiters::Int, abstol, reltol, verbose::Bool,
+                assumptions::OperatorAssumptions)
             return nothing
         end
     end
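The `defaultalg` method above is what lets a bare `solve(prob)` pick a banded-aware path when `A` is a `BandedMatrix`. A minimal sketch (matrix illustrative, not from this diff):

using LinearSolve, BandedMatrices

n = 100
A = BandedMatrix(-1 => fill(-1.0, n - 1), 0 => fill(2.0, n), 1 => fill(-1.0, n - 1))  # tridiagonal band
b = rand(n)
sol = solve(LinearProblem(A, b))   # dispatches through the extension's defaultalg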
2 changes: 1 addition & 1 deletion ext/LinearSolveBlockDiagonalsExt.jl
@@ -3,7 +3,7 @@ module LinearSolveBlockDiagonalsExt
 using LinearSolve, BlockDiagonals

 function LinearSolve.init_cacheval(alg::SimpleGMRES{false}, A::BlockDiagonal, b, args...;
-    kwargs...)
+        kwargs...)
     @assert ndims(A)==2 "ndims(A) == $(ndims(A)). `A` must have ndims == 2."
     # We need to perform this check even when `zeroinit == true`, since the type of the
     # cache is dependent on whether we are able to use the specialized dispatch.
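As the comment above notes, the cache type depends on whether the specialized block dispatch applies. A usage sketch for this extension, assuming `SimpleGMRES` takes a `blocksize` keyword (an assumption — check the LinearSolve docs for the exact signature):

using LinearSolve, BlockDiagonals, LinearAlgebra

A = BlockDiagonal([rand(4, 4) + 4I for _ in 1:3])   # 12×12 with three 4×4 blocks
b = rand(12)
sol = solve(LinearProblem(A, b), SimpleGMRES(; blocksize = 4))   # blocksize is assumed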
6 changes: 3 additions & 3 deletions ext/LinearSolveCUDAExt.jl
@@ -6,7 +6,7 @@ using LinearSolve.LinearAlgebra, LinearSolve.SciMLBase, LinearSolve.ArrayInterfa
 using SciMLBase: AbstractSciMLOperator

 function SciMLBase.solve!(cache::LinearSolve.LinearCache, alg::CudaOffloadFactorization;
-    kwargs...)
+        kwargs...)
     if cache.isfresh
         fact = qr(CUDA.CuArray(cache.A))
         cache.cacheval = fact
@@ -18,8 +18,8 @@ function SciMLBase.solve!(cache::LinearSolve.LinearCache, alg::CudaOffloadFactor
 end

 function LinearSolve.init_cacheval(alg::CudaOffloadFactorization, A, b, u, Pl, Pr,
-    maxiters::Int, abstol, reltol, verbose::Bool,
-    assumptions::OperatorAssumptions)
+        maxiters::Int, abstol, reltol, verbose::Bool,
+        assumptions::OperatorAssumptions)
     qr(CUDA.CuArray(A))
 end

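With CUDA.jl loaded, `CudaOffloadFactorization` offloads the dense factorization to the GPU via the `qr(CUDA.CuArray(...))` calls shown above. A minimal usage sketch (sizes illustrative):

using LinearSolve, CUDA

A = rand(Float32, 1000, 1000)
b = rand(Float32, 1000)
sol = solve(LinearProblem(A, b), CudaOffloadFactorization())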