From 3b3bf9b70f3e59f3909f9beb5e7d15473ed4a9f4 Mon Sep 17 00:00:00 2001
From: Avik Pal
Date: Sun, 10 Sep 2023 18:24:46 -0400
Subject: [PATCH 1/2] Use multiple forward diff trials to generate approximate
 Jacobian sparsity pattern

---
 Project.toml                 |  3 ++-
 src/SparseDiffTools.jl       |  4 ++--
 src/highlevel/coloring.jl    | 26 ++++++++++++++++++++++++++
 src/highlevel/common.jl      | 25 +++++++++++++++++++++++++
 test/test_sparse_jacobian.jl |  2 +-
 5 files changed, 56 insertions(+), 4 deletions(-)

diff --git a/Project.toml b/Project.toml
index f3eda524..c34c485a 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "SparseDiffTools"
 uuid = "47a9eef4-7e08-11e9-0b38-333d64bd3804"
 authors = ["Pankaj Mishra ", "Chris Rackauckas "]
-version = "2.6.0"
+version = "2.7.0"
 
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -14,6 +14,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 PackageExtensionCompat = "65ce6f38-6b18-4e1d-a461-8949797d7930"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
 SciMLOperators = "c0aeaf25-5076-4817-a8d5-81caf7dfa961"
 Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
diff --git a/src/SparseDiffTools.jl b/src/SparseDiffTools.jl
index 68633d78..52727b61 100644
--- a/src/SparseDiffTools.jl
+++ b/src/SparseDiffTools.jl
@@ -17,7 +17,7 @@ using ArrayInterface, SparseArrays
 import ArrayInterface: matrix_colors
 import StaticArrays
 # Others
-using SciMLOperators, LinearAlgebra
+using SciMLOperators, LinearAlgebra, Random
 import DataStructures: DisjointSets, find_root!, union!
 import SciMLOperators: update_coefficients, update_coefficients!
 import Setfield: @set!
@@ -89,7 +89,7 @@ export update_coefficients, update_coefficients!, value!
 export AutoSparseEnzyme
 
 export NoSparsityDetection, SymbolicsSparsityDetection, JacPrototypeSparsityDetection,
-    PrecomputedJacobianColorvec, AutoSparsityDetection
+    PrecomputedJacobianColorvec, ApproximateJacobianSparsity, AutoSparsityDetection
 
 export sparse_jacobian, sparse_jacobian_cache, sparse_jacobian!
 export init_jacobian
diff --git a/src/highlevel/coloring.jl b/src/highlevel/coloring.jl
index 3e382600..1594e7b9 100644
--- a/src/highlevel/coloring.jl
+++ b/src/highlevel/coloring.jl
@@ -30,5 +30,31 @@ function (alg::PrecomputedJacobianColorvec)(ad::AbstractSparseADType, args...; k
     return MatrixColoringResult(colorvec, J, nz_rows, nz_cols)
 end
 
+# Approximate Jacobian Sparsity Detection
+## Right now we hardcode it to use `ForwardDiff`
+function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f, x; kwargs...)
+    @unpack ntrials, rng = alg
+    cfg = ForwardDiff.JacobianConfig(f, x)
+    J = sum(1:ntrials) do _
+        local x_ = similar(x)
+        rand!(rng, x_)
+        abs.(ForwardDiff.jacobian(f, x_, cfg))
+    end
+    return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f, x;
+        kwargs...)
+end
+
+function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f!, fx, x; kwargs...)
+    @unpack ntrials, rng = alg
+    cfg = ForwardDiff.JacobianConfig(f!, fx, x)
+    J = sum(1:ntrials) do _
+        local x_ = similar(x)
+        rand!(rng, x_)
+        abs.(ForwardDiff.jacobian(f!, fx, x_, cfg))
+    end
+    return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f!, fx,
+        x; kwargs...)
+end
+
 # TODO: Heuristics to decide whether to use Sparse Differentiation or not
 # Simple Idea: Check min(max(colorvec_cols), max(colorvec_rows))
diff --git a/src/highlevel/common.jl b/src/highlevel/common.jl
index 6f6745c4..a2209005 100644
--- a/src/highlevel/common.jl
+++ b/src/highlevel/common.jl
@@ -112,6 +112,31 @@ function _get_colorvec(alg::PrecomputedJacobianColorvec, ::AbstractReverseMode)
     return cvec
 end
 
+"""
+    ApproximateJacobianSparsity(; ntrials = 3, rng = Random.default_rng(),
+        alg = GreedyD1Color())
+
+Use `ntrials` random vectors to compute the sparsity pattern of the Jacobian. This is an
+approximate method, and the detected sparsity pattern is not guaranteed to be exact.
+
+## Keyword Arguments
+
+  - `ntrials`: The number of random vectors to use for computing the sparsity pattern.
+  - `rng`: The random number generator used for generating the random vectors.
+  - `alg`: The algorithm used for computing the matrix colors.
+"""
+struct ApproximateJacobianSparsity{R <: AbstractRNG,
+    A <: ArrayInterface.ColoringAlgorithm} <: AbstractSparsityDetection
+    ntrials::Int
+    rng::R
+    alg::A
+end
+
+function ApproximateJacobianSparsity(; ntrials::Int = 3,
+        rng::AbstractRNG = Random.default_rng(), alg = GreedyD1Color())
+    return ApproximateJacobianSparsity(ntrials, rng, alg)
+end
+
 # No one should be using this currently
 Base.@kwdef struct AutoSparsityDetection{A <: ArrayInterface.ColoringAlgorithm} <:
                    AbstractSparsityDetection
diff --git a/test/test_sparse_jacobian.jl b/test/test_sparse_jacobian.jl
index 3ef4b206..290d350d 100644
--- a/test/test_sparse_jacobian.jl
+++ b/test/test_sparse_jacobian.jl
@@ -30,7 +30,7 @@ row_colorvec = SparseDiffTools.matrix_colors(J_sparsity; partition_by_rows = tru
 col_colorvec = SparseDiffTools.matrix_colors(J_sparsity; partition_by_rows = false)
 
 SPARSITY_DETECTION_ALGS = [JacPrototypeSparsityDetection(; jac_prototype = J_sparsity),
-    SymbolicsSparsityDetection(), NoSparsityDetection(),
+    SymbolicsSparsityDetection(), NoSparsityDetection(), ApproximateJacobianSparsity(),
     PrecomputedJacobianColorvec(; jac_prototype = J_sparsity, row_colorvec, col_colorvec)]
 
 @testset "High-Level API" begin

From 1f88ad15f19f8e95c9601c9d1cf1512abfa3f84a Mon Sep 17 00:00:00 2001
From: Avik Pal
Date: Sun, 10 Sep 2023 20:17:51 -0400
Subject: [PATCH 2/2] Fix type instability

---
 src/highlevel/coloring.jl | 24 ++++++++++++++----------
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git a/src/highlevel/coloring.jl b/src/highlevel/coloring.jl
index 1594e7b9..774b8010 100644
--- a/src/highlevel/coloring.jl
+++ b/src/highlevel/coloring.jl
@@ -32,25 +32,29 @@ end
 
 # Approximate Jacobian Sparsity Detection
 ## Right now we hardcode it to use `ForwardDiff`
-function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f, x; kwargs...)
+function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f, x; fx = nothing,
+        kwargs...)
     @unpack ntrials, rng = alg
+    fx = fx === nothing ? f(x) : fx
+    J = fill!(similar(fx, length(fx), length(x)), 0)
     cfg = ForwardDiff.JacobianConfig(f, x)
-    J = sum(1:ntrials) do _
-        local x_ = similar(x)
-        rand!(rng, x_)
-        abs.(ForwardDiff.jacobian(f, x_, cfg))
+    for _ in 1:ntrials
+        x_ = similar(x)
+        randn!(rng, x_)
+        J .+= abs.(ForwardDiff.jacobian(f, x_, cfg))
     end
     return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f, x;
-        kwargs...)
+        fx, kwargs...)
 end
 
 function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f!, fx, x; kwargs...)
     @unpack ntrials, rng = alg
     cfg = ForwardDiff.JacobianConfig(f!, fx, x)
-    J = sum(1:ntrials) do _
-        local x_ = similar(x)
-        rand!(rng, x_)
-        abs.(ForwardDiff.jacobian(f!, fx, x_, cfg))
+    J = fill!(similar(fx, length(fx), length(x)), 0)
+    for _ in 1:ntrials
+        x_ = similar(x)
+        randn!(rng, x_)
+        J .+= abs.(ForwardDiff.jacobian(f!, fx, x_, cfg))
     end
     return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f!, fx,
         x; kwargs...)
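
Note (illustration, not part of the patches): a minimal usage sketch of the new
`ApproximateJacobianSparsity` detector through the package's existing high-level API. The
target function `f` and the input size are made up for illustration; `AutoSparseForwardDiff`
comes from ADTypes, which is already a dependency of the package.

    using SparseDiffTools, ADTypes

    # Hypothetical target function with a sparse Jacobian.
    f(x) = [x[1]^2 + x[2], x[2] * x[3], x[3]^2, x[1] * x[4]]

    x = rand(4)
    sd = ApproximateJacobianSparsity(; ntrials = 3)  # detector added in PATCH 1/2
    ad = AutoSparseForwardDiff()

    # One-shot call, or build a cache once and reuse it across calls.
    J = sparse_jacobian(ad, sd, f, x)
    cache = sparse_jacobian_cache(ad, sd, f, x)
    J2 = sparse_jacobian(ad, cache, f, x)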
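
Note (illustration, not part of the patches): the detection idea written as a
self-contained sketch. `approximate_jacobian_pattern` is a hypothetical helper name, not a
package API; it mirrors what the detector does internally and uses the
preallocate-and-accumulate loop from PATCH 2/2, where the accumulator's element type is
fixed by `fx` up front instead of being inferred through the `sum` closure.

    using ForwardDiff, SparseArrays, Random

    # Union of the nonzeros of |J(x)| over `ntrials` random evaluation points.
    function approximate_jacobian_pattern(f, x; ntrials = 3, rng = Random.default_rng())
        fx = f(x)
        J = zeros(eltype(fx), length(fx), length(x))  # preallocate once (type stable)
        cfg = ForwardDiff.JacobianConfig(f, x)
        for _ in 1:ntrials
            x_ = similar(x)
            randn!(rng, x_)                           # fresh random evaluation point
            J .+= abs.(ForwardDiff.jacobian(f, x_, cfg))
        end
        return sparse(J)  # stored nonzeros give the detected sparsity pattern
    end

    # Entries that are zero at every trial point are treated as structural zeros.
    pattern = approximate_jacobian_pattern(x -> [x[1]^2, x[2] * x[3], x[3]], rand(3))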