From 35d277bf2ad2de6994e011107102d13a6fe71095 Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Wed, 2 Oct 2024 12:17:49 -0400 Subject: [PATCH 1/3] feat: using DI for structured Jacobians --- Project.toml | 4 +- src/NonlinearSolve.jl | 3 - src/internal/jacobian.jl | 118 +++++++-------------------------------- 3 files changed, 22 insertions(+), 103 deletions(-) diff --git a/Project.toml b/Project.toml index 7d8f1f907..0f77d7966 100644 --- a/Project.toml +++ b/Project.toml @@ -30,7 +30,6 @@ Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46" SimpleNonlinearSolve = "727e6d20-b764-4bd8-a329-72de5adea6c7" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SparseConnectivityTracer = "9f842d2f-2579-4b1d-911e-f412cf18a3f5" -SparseDiffTools = "47a9eef4-7e08-11e9-0b38-333d64bd3804" SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35" StaticArraysCore = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" SymbolicIndexingInterface = "2efcf032-c050-4f8e-a9bb-153293bab1f5" @@ -146,6 +145,7 @@ Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" ReTestItems = "817f1d60-ba6b-4fd5-9520-3cf149f6a823" SIAMFANLEquations = "084e46ad-d928-497d-ad5e-07fa361a48c4" +SparseDiffTools = "47a9eef4-7e08-11e9-0b38-333d64bd3804" SpeedMapping = "f1835b91-879b-4a3f-a438-e4baacf14412" StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3" StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" @@ -155,4 +155,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [targets] -test = ["Aqua", "BandedMatrices", "BenchmarkTools", "CUDA", "Enzyme", "ExplicitImports", "FastLevenbergMarquardt", "FixedPointAcceleration", "Hwloc", "InteractiveUtils", "LeastSquaresOptim", "MINPACK", "ModelingToolkit", "NLSolvers", "NLsolve", "NaNMath", "NonlinearProblemLibrary", "OrdinaryDiffEqTsit5", "Pkg", "Random", "ReTestItems", "SIAMFANLEquations", "SpeedMapping", "StableRNGs", "StaticArrays", "Sundials", "Symbolics", "Test", "Zygote"] +test = ["Aqua", "BandedMatrices", "BenchmarkTools", "CUDA", "Enzyme", "ExplicitImports", "FastLevenbergMarquardt", "FixedPointAcceleration", "Hwloc", "InteractiveUtils", "LeastSquaresOptim", "MINPACK", "ModelingToolkit", "NLSolvers", "NLsolve", "NaNMath", "NonlinearProblemLibrary", "OrdinaryDiffEqTsit5", "Pkg", "Random", "ReTestItems", "SIAMFANLEquations", "SparseDiffTools", "SpeedMapping", "StableRNGs", "StaticArrays", "Sundials", "Symbolics", "Test", "Zygote"] diff --git a/src/NonlinearSolve.jl b/src/NonlinearSolve.jl index 575c4cc1c..ab5e31afc 100644 --- a/src/NonlinearSolve.jl +++ b/src/NonlinearSolve.jl @@ -54,9 +54,6 @@ using SciMLJacobianOperators: AbstractJacobianOperator, JacobianOperator, VecJac ## Sparse AD Support using SparseArrays: AbstractSparseMatrix, SparseMatrixCSC using SparseConnectivityTracer: TracerSparsityDetector # This can be dropped in the next release -using SparseDiffTools: SparseDiffTools, JacPrototypeSparsityDetection, - PrecomputedJacobianColorvec, init_jacobian, sparse_jacobian, - sparse_jacobian!, sparse_jacobian_cache using SparseMatrixColorings: ConstantColoringAlgorithm, GreedyColoringAlgorithm, LargestFirst diff --git a/src/internal/jacobian.jl b/src/internal/jacobian.jl index f8264c79a..c152fc749 100644 --- a/src/internal/jacobian.jl +++ b/src/internal/jacobian.jl @@ -37,7 +37,6 @@ Construct a cache for the Jacobian of `f` w.r.t. `u`. 
stats::NLStats autodiff di_extras - sdifft_extras end function reinit_cache!(cache::JacobianCache{iip}, args...; p = cache.p, @@ -63,31 +62,13 @@ function JacobianCache(prob, alg, f::F, fu_, u, p; stats, autodiff = nothing, if !has_analytic_jac && needs_jac autodiff = construct_concrete_adtype(f, autodiff) - using_sparsedifftools = autodiff isa StructuredMatrixAutodiff - # SparseMatrixColorings can't handle structured matrices - if using_sparsedifftools - di_extras = nothing - uf = JacobianWrapper{iip}(f, p) - sdifft_extras = if iip - sparse_jacobian_cache( - autodiff.autodiff, autodiff.sparsity_detection, uf, fu, u) - else - sparse_jacobian_cache(autodiff.autodiff, autodiff.sparsity_detection, - uf, __maybe_mutable(u, autodiff); fx = fu) - end - autodiff = autodiff.autodiff # For saving we unwrap + di_extras = if iip + DI.prepare_jacobian(f, fu, autodiff, u, Constant(prob.p)) else - sdifft_extras = nothing - di_extras = if iip - DI.prepare_jacobian(f, fu, autodiff, u, Constant(prob.p)) - else - DI.prepare_jacobian(f, autodiff, u, Constant(prob.p)) - end + DI.prepare_jacobian(f, autodiff, u, Constant(prob.p)) end else - using_sparsedifftools = false di_extras = nothing - sdifft_extras = nothing end J = if !needs_jac @@ -98,22 +79,18 @@ function JacobianCache(prob, alg, f::F, fu_, u, p; stats, autodiff = nothing, JacobianOperator(prob, fu, u; jvp_autodiff, vjp_autodiff) else if f.jac_prototype === nothing - if !using_sparsedifftools - # While this is technically wasteful, it gives out the type of the Jacobian - # which is needed to create the linear solver cache - stats.njacs += 1 - if has_analytic_jac - __similar( - fu, promote_type(eltype(fu), eltype(u)), length(fu), length(u)) + # While this is technically wasteful, it gives out the type of the Jacobian + # which is needed to create the linear solver cache + stats.njacs += 1 + if has_analytic_jac + __similar( + fu, promote_type(eltype(fu), eltype(u)), length(fu), length(u)) + else + if iip + DI.jacobian(f, fu, di_extras, autodiff, u, Constant(p)) else - if iip - DI.jacobian(f, fu, di_extras, autodiff, u, Constant(p)) - else - DI.jacobian(f, autodiff, u, Constant(p)) - end + DI.jacobian(f, autodiff, u, Constant(p)) end - else - zero(init_jacobian(sdifft_extras; preserve_immutable = Val(true))) end else jac_proto = if eltype(f.jac_prototype) <: Bool @@ -126,20 +103,19 @@ function JacobianCache(prob, alg, f::F, fu_, u, p; stats, autodiff = nothing, end end - return JacobianCache{iip}( - J, f, fu, u, p, stats, autodiff, di_extras, sdifft_extras) + return JacobianCache{iip}(J, f, fu, u, p, stats, autodiff, di_extras) end function JacobianCache(prob, alg, f::F, ::Number, u::Number, p; stats, autodiff = nothing, kwargs...) 
where {F} fu = f(u, p) if SciMLBase.has_jac(f) || SciMLBase.has_vjp(f) || SciMLBase.has_jvp(f) - return JacobianCache{false}(u, f, fu, u, p, stats, autodiff, nothing, nothing) + return JacobianCache{false}(u, f, fu, u, p, stats, autodiff, nothing) end autodiff = get_dense_ad(get_concrete_forward_ad( autodiff, prob; check_forward_mode = false)) di_extras = DI.prepare_derivative(f, get_dense_ad(autodiff), u, Constant(prob.p)) - return JacobianCache{false}(u, f, fu, u, p, stats, autodiff, di_extras, nothing) + return JacobianCache{false}(u, f, fu, u, p, stats, autodiff, di_extras) end (cache::JacobianCache)(u = cache.u) = cache(cache.J, u, cache.p) @@ -172,27 +148,16 @@ function (cache::JacobianCache{iip})( if iip if SciMLBase.has_jac(cache.f) cache.f.jac(J, u, p) - elseif cache.di_extras !== nothing + else DI.jacobian!( cache.f, cache.fu, J, cache.di_extras, cache.autodiff, u, Constant(p)) - else - uf = JacobianWrapper{iip}(cache.f, p) - sparse_jacobian!(J, cache.autodiff, cache.sdifft_extras, uf, cache.fu, u) end return J else if SciMLBase.has_jac(cache.f) return cache.f.jac(u, p) - elseif cache.di_extras !== nothing - return DI.jacobian(cache.f, cache.di_extras, cache.autodiff, u, Constant(p)) else - uf = JacobianWrapper{iip}(cache.f, p) - if __can_setindex(typeof(J)) - sparse_jacobian!(J, cache.autodiff, cache.sdifft_extras, uf, u) - return J - else - return sparse_jacobian(cache.autodiff, cache.sdifft_extras, uf, u) - end + return DI.jacobian(cache.f, cache.di_extras, cache.autodiff, u, Constant(p)) end end end @@ -207,10 +172,6 @@ function construct_concrete_adtype(f::NonlinearFunction, ad::AbstractADType) end return ad # No sparse AD else - if ArrayInterface.isstructured(f.jac_prototype) - return select_fastest_structured_matrix_autodiff(f.jac_prototype, f, ad) - end - return AutoSparse( ad; sparsity_detector = KnownJacobianSparsityDetector(f.jac_prototype), @@ -227,10 +188,6 @@ function construct_concrete_adtype(f::NonlinearFunction, ad::AbstractADType) Base.depwarn("`sparsity::typeof($(typeof(f.sparsity)))` is deprecated. \ Pass it as `jac_prototype` instead.", :NonlinearSolve) - if ArrayInterface.isstructured(f.sparsity) - return select_fastest_structured_matrix_autodiff(f.sparsity, f, ad) - end - return AutoSparse( ad; sparsity_detector = KnownJacobianSparsityDetector(f.sparsity), @@ -252,11 +209,8 @@ function construct_concrete_adtype(f::NonlinearFunction, ad::AbstractADType) coloring_algorithm = GreedyColoringAlgorithm(LargestFirst()) ) else - if ArrayInterface.isstructured(f.jac_prototype) - return select_fastest_structured_matrix_autodiff(f.jac_prototype, f, ad) - end - - if f.jac_prototype isa AbstractSparseMatrix + if f.jac_prototype isa AbstractSparseMatrix || + ArrayInterface.isstructured(f.jac_prototype) if !(sparsity_detector isa NoSparsityDetector) @warn "`jac_prototype` is a sparse matrix but sparsity = $(f.sparsity) \ has also been specified. 
Ignoring sparsity field and using \ @@ -275,38 +229,6 @@ function construct_concrete_adtype(f::NonlinearFunction, ad::AbstractADType) end end -@concrete struct StructuredMatrixAutodiff <: AbstractADType - autodiff <: AbstractADType - sparsity_detection -end - -function select_fastest_structured_matrix_autodiff( - prototype::AbstractMatrix, f::NonlinearFunction, ad::AbstractADType) - sparsity_detection = if SciMLBase.has_colorvec(f) - PrecomputedJacobianColorvec(; - jac_prototype = prototype, - f.colorvec, - partition_by_rows = ADTypes.mode(ad) isa ADTypes.ReverseMode - ) - else - if ArrayInterface.fast_matrix_colors(prototype) - colorvec = if ADTypes.mode(ad) isa ADTypes.ForwardMode - ArrayInterface.matrix_colors(prototype) - else - ArrayInterface.matrix_colors(prototype') - end - PrecomputedJacobianColorvec(; - jac_prototype = prototype, - colorvec, - partition_by_rows = ADTypes.mode(ad) isa ADTypes.ReverseMode - ) - else - JacPrototypeSparsityDetection(; jac_prototype = prototype) - end - end - return StructuredMatrixAutodiff(AutoSparse(ad), sparsity_detection) -end - function select_fastest_coloring_algorithm( prototype, f::NonlinearFunction, ad::AbstractADType) if SciMLBase.has_colorvec(f) From 44143ec8b91ab2ac9aefc25be3452cfc200f0893 Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Wed, 2 Oct 2024 12:51:55 -0400 Subject: [PATCH 2/3] docs: add a table to guarantee selections --- docs/src/basics/sparsity_detection.md | 39 ++++++++++++++++++++------- src/internal/jacobian.jl | 28 +++++++++++++------ 2 files changed, 50 insertions(+), 17 deletions(-) diff --git a/docs/src/basics/sparsity_detection.md b/docs/src/basics/sparsity_detection.md index 241399d99..fc03528ac 100644 --- a/docs/src/basics/sparsity_detection.md +++ b/docs/src/basics/sparsity_detection.md @@ -6,6 +6,34 @@ to use this in an actual problem, see Notation wise we are trying to solve for `x` such that `nlfunc(x) = 0`. 
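The decision table below keys on three fields of the `NonlinearFunction`: `f.sparsity`, `f.jac_prototype`, and `f.colorvec`. As a rough sketch of where these fields enter (the tridiagonal toy residual, sizes, and parameter values here are purely illustrative and not part of the examples used later on this page):

```julia
using NonlinearSolve, SparseArrays

# Toy residual whose Jacobian is tridiagonal: component i couples only to
# its immediate neighbours, so the sparsity pattern is known up front.
function nlfunc(u, p)
    du = similar(u)
    du[1] = 2u[1] - u[2] - p
    for i in 2:(length(u) - 1)
        du[i] = 2u[i] - u[i - 1] - u[i + 1]
    end
    du[end] = 2u[end] - u[end - 1] - p
    return du
end

n = 16
# Passing this pattern as `jac_prototype` corresponds to the sparse /
# structured prototype rows of the table below.
jac_prototype = spdiagm(-1 => ones(n - 1), 0 => ones(n), 1 => ones(n - 1))

prob = NonlinearProblem(NonlinearFunction(nlfunc; jac_prototype = jac_prototype), ones(n), 2.0)
sol = solve(prob, NewtonRaphson())
```

The table below summarizes which sparsity detection and coloring algorithm NonlinearSolve selects for each combination of these fields.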
+## Big Table for Determining Sparsity Detection and Coloring Algorithms + +| `f.sparsity` | `f.jac_prototype` | `f.colorvec` | Sparsity Detection | Coloring Algorithm | +| :------------------------- | :---------------- | :----------- | :----------------------------------------------- | :---------------------------------------- | +| ❌ | ❌ | `Any` | `NoSparsityDetector()` | `NoColoringAlgorithm()` | +| ❌ | Not Structured | `Any` | `NoSparsityDetector()` | `NoColoringAlgorithm()` | +| ❌ | Structured | ✅ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | +| ❌ | Structured | ❌ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | +| - | - | - | - | - | +| `AbstractMatrix` | ❌ | ✅ | `KnownJacobianSparsityDetector(f.sparsity)` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractMatrix` | ❌ | ❌ | `KnownJacobianSparsityDetector(f.sparsity)` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractMatrix` | Not Structured | ✅ | `KnownJacobianSparsityDetector(f.sparsity)` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractMatrix` | Not Structured | ❌ | `KnownJacobianSparsityDetector(f.sparsity)` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractMatrix` | Structured | `Any` | 🔴 | 🔴 | +| - | - | - | - | - | +| `AbstractSparsityDetector` | ❌ | `Any` | `f.sparsity` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractSparsityDetector` | Not Structured | ✅ | `f.sparsity` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractSparsityDetector` | Not Structured | ❌ | `f.sparsity` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractSparsityDetector` | Structured | ✅ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractSparsityDetector` | Structured | ❌ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | + +1. `Structured` means either a `AbstractSparseMatrix` or `ArrayInterface.isstructured(x)` is true. +2. ❌ means not provided (default) +3. ✅ means provided +4. 🔴 means an error will be thrown +5. Providing a colorvec without specifying either sparsity or jac_prototype with a sparse or structured matrix will cause us to ignore the colorvec. +6. The function calls demonstrated above are simply pseudo-code to show the general idea. + ## Case I: Sparse Jacobian Prototype is Provided Let's say you have a Sparse Jacobian Prototype `jac_prototype`, in this case you can @@ -27,19 +55,12 @@ prob = NonlinearProblem( If the `colorvec` is not provided, then it is computed on demand. !!! note - + One thing to be careful about in this case is that `colorvec` is dependent on the autodiff backend used. `ADTypes.mode(ad) isa ADTypes.ForwardMode` will assume that the colorvec is the column colorvec, otherwise we will assume that the colorvec is the row colorvec. -!!! warning - - Previously you could provide a `sparsity` argument to `NonlinearFunction` to specify - the jacobian prototype. However, to avoid confusion, this is now deprecated. Instead, - use the `jac_prototype` argument. `sparsity` must be used to exclusively specify the - sparsity detection algorithm. - ## Case II: Sparsity Detection algorithm is provided If you don't have a Sparse Jacobian Prototype, but you know the which sparsity detection @@ -59,7 +80,7 @@ for more information on sparsity detection algorithms. ## Case III: Sparse AD Type is being Used !!! warning - + This is now deprecated. Please use the previous two cases instead. 
If you constructed a Nonlinear Solver with a sparse AD type, for example diff --git a/src/internal/jacobian.jl b/src/internal/jacobian.jl index c152fc749..93d10f98b 100644 --- a/src/internal/jacobian.jl +++ b/src/internal/jacobian.jl @@ -172,6 +172,13 @@ function construct_concrete_adtype(f::NonlinearFunction, ad::AbstractADType) end return ad # No sparse AD else + if !sparse_or_structured_prototype(f.jac_prototype) + if SciMLBase.has_colorvec(f) + @warn "`colorvec` is provided but `jac_prototype` is not a sparse \ + or structured matrix. `colorvec` will be ignored." + end + return ad + end return AutoSparse( ad; sparsity_detector = KnownJacobianSparsityDetector(f.jac_prototype), @@ -181,13 +188,14 @@ function construct_concrete_adtype(f::NonlinearFunction, ad::AbstractADType) end else if f.sparsity isa AbstractMatrix - if f.jac_prototype !== nothing && f.jac_prototype !== f.sparsity - throw(ArgumentError("`sparsity::AbstractMatrix` and `jac_prototype` cannot \ - be both provided. Pass only `jac_prototype`.")) + if f.jac_prototype !== f.sparsity + if f.jac_prototype !== nothing && + sparse_or_structured_prototype(f.jac_prototype) + throw(ArgumentError("`sparsity::AbstractMatrix` and a sparse or \ + structured `jac_prototype` cannot be both \ + provided. Pass only `jac_prototype`.")) + end end - Base.depwarn("`sparsity::typeof($(typeof(f.sparsity)))` is deprecated. \ - Pass it as `jac_prototype` instead.", - :NonlinearSolve) return AutoSparse( ad; sparsity_detector = KnownJacobianSparsityDetector(f.sparsity), @@ -209,8 +217,7 @@ function construct_concrete_adtype(f::NonlinearFunction, ad::AbstractADType) coloring_algorithm = GreedyColoringAlgorithm(LargestFirst()) ) else - if f.jac_prototype isa AbstractSparseMatrix || - ArrayInterface.isstructured(f.jac_prototype) + if sparse_or_structured_prototype(f.jac_prototype) if !(sparsity_detector isa NoSparsityDetector) @warn "`jac_prototype` is a sparse matrix but sparsity = $(f.sparsity) \ has also been specified. Ignoring sparsity field and using \ @@ -254,3 +261,8 @@ end get_dense_ad(ad) = ad get_dense_ad(ad::AutoSparse) = ADTypes.dense_ad(ad) + +sparse_or_structured_prototype(::AbstractSparseMatrix) = true +function sparse_or_structured_prototype(prototype::AbstractMatrix) + return ArrayInterface.isstructured(prototype) +end From 8fc028b291821cca0cf5d11f6baaf2aef5c4448a Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Wed, 2 Oct 2024 12:54:28 -0400 Subject: [PATCH 3/3] chore: apply suggestions from code review Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- docs/src/basics/sparsity_detection.md | 48 +++++++++++++-------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/docs/src/basics/sparsity_detection.md b/docs/src/basics/sparsity_detection.md index fc03528ac..de924c398 100644 --- a/docs/src/basics/sparsity_detection.md +++ b/docs/src/basics/sparsity_detection.md @@ -9,30 +9,30 @@ Notation wise we are trying to solve for `x` such that `nlfunc(x) = 0`. 
## Big Table for Determining Sparsity Detection and Coloring Algorithms | `f.sparsity` | `f.jac_prototype` | `f.colorvec` | Sparsity Detection | Coloring Algorithm | -| :------------------------- | :---------------- | :----------- | :----------------------------------------------- | :---------------------------------------- | -| ❌ | ❌ | `Any` | `NoSparsityDetector()` | `NoColoringAlgorithm()` | -| ❌ | Not Structured | `Any` | `NoSparsityDetector()` | `NoColoringAlgorithm()` | -| ❌ | Structured | ✅ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | -| ❌ | Structured | ❌ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | +|:-------------------------- |:----------------- |:------------ |:------------------------------------------------ |:----------------------------------------- | +| ❌ | ❌ | `Any` | `NoSparsityDetector()` | `NoColoringAlgorithm()` | +| ❌ | Not Structured | `Any` | `NoSparsityDetector()` | `NoColoringAlgorithm()` | +| ❌ | Structured | ✅ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | +| ❌ | Structured | ❌ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | | - | - | - | - | - | -| `AbstractMatrix` | ❌ | ✅ | `KnownJacobianSparsityDetector(f.sparsity)` | `ConstantColoringAlgorithm(f.colorvec)` | -| `AbstractMatrix` | ❌ | ❌ | `KnownJacobianSparsityDetector(f.sparsity)` | `GreedyColoringAlgorithm(LargestFirst())` | -| `AbstractMatrix` | Not Structured | ✅ | `KnownJacobianSparsityDetector(f.sparsity)` | `ConstantColoringAlgorithm(f.colorvec)` | -| `AbstractMatrix` | Not Structured | ❌ | `KnownJacobianSparsityDetector(f.sparsity)` | `GreedyColoringAlgorithm(LargestFirst())` | -| `AbstractMatrix` | Structured | `Any` | 🔴 | 🔴 | +| `AbstractMatrix` | ❌ | ✅ | `KnownJacobianSparsityDetector(f.sparsity)` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractMatrix` | ❌ | ❌ | `KnownJacobianSparsityDetector(f.sparsity)` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractMatrix` | Not Structured | ✅ | `KnownJacobianSparsityDetector(f.sparsity)` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractMatrix` | Not Structured | ❌ | `KnownJacobianSparsityDetector(f.sparsity)` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractMatrix` | Structured | `Any` | 🔴 | 🔴 | | - | - | - | - | - | -| `AbstractSparsityDetector` | ❌ | `Any` | `f.sparsity` | `GreedyColoringAlgorithm(LargestFirst())` | -| `AbstractSparsityDetector` | Not Structured | ✅ | `f.sparsity` | `ConstantColoringAlgorithm(f.colorvec)` | -| `AbstractSparsityDetector` | Not Structured | ❌ | `f.sparsity` | `GreedyColoringAlgorithm(LargestFirst())` | -| `AbstractSparsityDetector` | Structured | ✅ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `ConstantColoringAlgorithm(f.colorvec)` | -| `AbstractSparsityDetector` | Structured | ❌ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | - -1. `Structured` means either a `AbstractSparseMatrix` or `ArrayInterface.isstructured(x)` is true. -2. ❌ means not provided (default) -3. ✅ means provided -4. 🔴 means an error will be thrown -5. Providing a colorvec without specifying either sparsity or jac_prototype with a sparse or structured matrix will cause us to ignore the colorvec. -6. The function calls demonstrated above are simply pseudo-code to show the general idea. 
+| `AbstractSparsityDetector` | ❌ | `Any` | `f.sparsity` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractSparsityDetector` | Not Structured | ✅ | `f.sparsity` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractSparsityDetector` | Not Structured | ❌ | `f.sparsity` | `GreedyColoringAlgorithm(LargestFirst())` | +| `AbstractSparsityDetector` | Structured | ✅ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `ConstantColoringAlgorithm(f.colorvec)` | +| `AbstractSparsityDetector` | Structured | ❌ | `KnownJacobianSparsityDetector(f.jac_prototype)` | `GreedyColoringAlgorithm(LargestFirst())` | + + 1. `Structured` means either a `AbstractSparseMatrix` or `ArrayInterface.isstructured(x)` is true. + 2. ❌ means not provided (default) + 3. ✅ means provided + 4. 🔴 means an error will be thrown + 5. Providing a colorvec without specifying either sparsity or jac_prototype with a sparse or structured matrix will cause us to ignore the colorvec. + 6. The function calls demonstrated above are simply pseudo-code to show the general idea. ## Case I: Sparse Jacobian Prototype is Provided @@ -55,7 +55,7 @@ prob = NonlinearProblem( If the `colorvec` is not provided, then it is computed on demand. !!! note - + One thing to be careful about in this case is that `colorvec` is dependent on the autodiff backend used. `ADTypes.mode(ad) isa ADTypes.ForwardMode` will assume that the colorvec is the column colorvec, otherwise we will assume that the colorvec is the @@ -80,7 +80,7 @@ for more information on sparsity detection algorithms. ## Case III: Sparse AD Type is being Used !!! warning - + This is now deprecated. Please use the previous two cases instead. If you constructed a Nonlinear Solver with a sparse AD type, for example