Skip to content
This repository was archived by the owner on Apr 23, 2025. It is now read-only.

Commit 0a42304

Browse files
Merge pull request #295 from gdalle/adtypes_v1
Upgrade ADTypes to v1
2 parents f537806 + 6f001e2 commit 0a42304

19 files changed

+194
-141
lines changed

.github/workflows/CI.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ jobs:
1717
- InterfaceI
1818
version:
1919
- '1' # Latest Release
20-
- '1.6' # Current LTS
20+
- '1.10' # future LTS
2121
steps:
2222
- uses: actions/checkout@v4
2323
- uses: julia-actions/setup-julia@v2

.github/workflows/Downstream.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ jobs:
1414
strategy:
1515
fail-fast: false
1616
matrix:
17-
julia-version: [1,1.6]
17+
julia-version: [1,1.10]
1818
os: [ubuntu-latest]
1919
package:
2020
- {user: SciML, repo: OrdinaryDiffEq.jl, group: InterfaceII}

Project.toml

+23-5
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,10 @@
11
name = "SparseDiffTools"
22
uuid = "47a9eef4-7e08-11e9-0b38-333d64bd3804"
3-
authors = ["Pankaj Mishra <[email protected]>", "Chris Rackauckas <[email protected]>"]
4-
version = "2.18.0"
3+
authors = [
4+
"Pankaj Mishra <[email protected]>",
5+
"Chris Rackauckas <[email protected]>",
6+
]
7+
version = "2.19.0"
58

69
[deps]
710
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -40,7 +43,7 @@ SparseDiffToolsSymbolicsExt = "Symbolics"
4043
SparseDiffToolsZygoteExt = "Zygote"
4144

4245
[compat]
43-
ADTypes = "0.2.6"
46+
ADTypes = "1.0.0"
4447
Adapt = "3, 4"
4548
ArrayInterface = "7.4.2"
4649
Compat = "4"
@@ -65,9 +68,10 @@ Tricks = "0.1.6"
6568
UnPack = "1"
6669
VertexSafeGraphs = "0.2"
6770
Zygote = "0.6"
68-
julia = "1.6"
71+
julia = "1.10"
6972

7073
[extras]
74+
AllocCheck = "9b6a8646-10ed-4001-bbdc-1d2f46dfbb1a"
7175
BandedMatrices = "aae01518-5342-5314-be14-df237901396f"
7276
BlockBandedMatrices = "ffab5731-97b5-5995-9138-79e8c1846df0"
7377
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
@@ -83,4 +87,18 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
8387
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
8488

8589
[targets]
86-
test = ["Test", "BandedMatrices", "BlockBandedMatrices", "Enzyme", "IterativeSolvers", "Pkg", "Random", "SafeTestsets", "Symbolics", "Zygote", "StaticArrays"]
90+
test = [
91+
"Test",
92+
"AllocCheck",
93+
"BandedMatrices",
94+
"BlockBandedMatrices",
95+
"Enzyme",
96+
"IterativeSolvers",
97+
"Pkg",
98+
"PolyesterForwardDiff",
99+
"Random",
100+
"SafeTestsets",
101+
"Symbolics",
102+
"Zygote",
103+
"StaticArrays",
104+
]

README.md

+3-3
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,8 @@ We need to perform the following steps to utilize SparseDiffTools:
5959
the sparsity pattern. (Note that `Symbolics.jl` must be explicitly loaded before
6060
using this functionality.)
6161
2. Now choose an AD backend from `ADTypes.jl`:
62-
1. If using a Non `*Sparse*` type, then we will not use sparsity detection.
63-
2. All other sparse AD types will internally compute the proper sparsity pattern, and
62+
1. If using a standard type like `AutoForwardDiff()`, then we will not use sparsity detection.
63+
2. If you wrap it inside `AutoSparse(AutoForwardDiff())`, then we will internally compute the proper sparsity pattern, and
6464
try to exploit that.
6565
3. Now there are 2 options:
6666
1. Precompute the cache using `sparse_jacobian_cache` and use the `sparse_jacobian` or
@@ -73,7 +73,7 @@ We need to perform the following steps to utilize SparseDiffTools:
7373
using Symbolics
7474

7575
sd = SymbolicsSparsityDetection()
76-
adtype = AutoSparseFiniteDiff()
76+
adtype = AutoSparse(AutoFiniteDiff())
7777
x = rand(30)
7878
y = similar(x)
7979

docs/src/index.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ We need to perform the following steps to utilize SparseDiffTools:
6363
using Symbolics
6464

6565
sd = SymbolicsSparsityDetection()
66-
adtype = AutoSparseFiniteDiff()
66+
adtype = AutoSparse(AutoFiniteDiff())
6767
x = rand(30)
6868
y = similar(x)
6969

ext/SparseDiffToolsEnzymeExt.jl

+8-8
Original file line numberDiff line numberDiff line change
@@ -2,36 +2,36 @@ module SparseDiffToolsEnzymeExt
22

33
import ArrayInterface: fast_scalar_indexing
44
import SparseDiffTools: __f̂, __maybe_copy_x, __jacobian!, __gradient, __gradient!,
5-
AutoSparseEnzyme, __test_backend_loaded
5+
__test_backend_loaded
66
# FIXME: For Enzyme we currently assume reverse mode
7-
import ADTypes: AutoEnzyme
7+
import ADTypes: AutoSparse, AutoEnzyme
88
using Enzyme
99

1010
using ForwardDiff
1111

12-
@inline __test_backend_loaded(::Union{AutoSparseEnzyme, AutoEnzyme}) = nothing
12+
@inline __test_backend_loaded(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}) = nothing
1313

1414
## Satisfying High-Level Interface for Sparse Jacobians
15-
function __gradient(::Union{AutoSparseEnzyme, AutoEnzyme}, f, x, cols)
15+
function __gradient(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f, x, cols)
1616
dx = zero(x)
1717
autodiff(Reverse, __f̂, Const(f), Duplicated(x, dx), Const(cols))
1818
return vec(dx)
1919
end
2020

21-
function __gradient!(::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x, cols)
21+
function __gradient!(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f!, fx, x, cols)
2222
dx = zero(x)
2323
dfx = zero(fx)
2424
autodiff(Reverse, __f̂, Active, Const(f!), Duplicated(fx, dfx), Duplicated(x, dx),
2525
Const(cols))
2626
return dx
2727
end
2828

29-
function __jacobian!(J::AbstractMatrix, ::Union{AutoSparseEnzyme, AutoEnzyme}, f, x)
29+
function __jacobian!(J::AbstractMatrix, ::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f, x)
3030
J .= jacobian(Reverse, f, x, Val(size(J, 1)))
3131
return J
3232
end
3333

34-
@views function __jacobian!(J, ad::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x)
34+
@views function __jacobian!(J, ad::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f!, fx, x)
3535
# This version is slowish not sure how to do jacobians for inplace functions
3636
@warn "Current code for computing jacobian for inplace functions in Enzyme is slow." maxlog=1
3737
dfx = zero(fx)
@@ -58,6 +58,6 @@ end
5858
return J
5959
end
6060

61-
__maybe_copy_x(::Union{AutoSparseEnzyme, AutoEnzyme}, x::SubArray) = copy(x)
61+
__maybe_copy_x(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, x::SubArray) = copy(x)
6262

6363
end

ext/SparseDiffToolsPolyesterForwardDiffExt.jl

+7-6
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ using ADTypes, SparseDiffTools, PolyesterForwardDiff, UnPack, Random, SparseArra
44
import ForwardDiff
55
import SparseDiffTools: AbstractMaybeSparseJacobianCache, AbstractMaybeSparsityDetection,
66
ForwardColorJacCache, NoMatrixColoring, sparse_jacobian_cache,
7+
sparse_jacobian_cache_aux,
78
sparse_jacobian!,
89
sparse_jacobian_static_array, __standard_tag, __chunksize,
910
polyesterforwarddiff_color_jacobian
@@ -17,8 +18,8 @@ struct PolyesterForwardDiffJacobianCache{CO, CA, J, FX, X} <:
1718
x::X
1819
end
1920

20-
function sparse_jacobian_cache(
21-
ad::Union{AutoSparsePolyesterForwardDiff, AutoPolyesterForwardDiff},
21+
function sparse_jacobian_cache_aux(::ADTypes.ForwardMode,
22+
ad::Union{AutoSparse{<:AutoPolyesterForwardDiff}, AutoPolyesterForwardDiff},
2223
sd::AbstractMaybeSparsityDetection, f::F, x; fx = nothing) where {F}
2324
coloring_result = sd(ad, f, x)
2425
fx = fx === nothing ? similar(f(x)) : fx
@@ -35,8 +36,8 @@ function sparse_jacobian_cache(
3536
return PolyesterForwardDiffJacobianCache(coloring_result, cache, jac_prototype, fx, x)
3637
end
3738

38-
function sparse_jacobian_cache(
39-
ad::Union{AutoSparsePolyesterForwardDiff, AutoPolyesterForwardDiff},
39+
function sparse_jacobian_cache_aux(::ADTypes.ForwardMode,
40+
ad::Union{AutoSparse{<:AutoPolyesterForwardDiff}, AutoPolyesterForwardDiff},
4041
sd::AbstractMaybeSparsityDetection, f!::F, fx, x) where {F}
4142
coloring_result = sd(ad, f!, fx, x)
4243
if coloring_result isa NoMatrixColoring
@@ -77,7 +78,7 @@ end
7778

7879
## Approximate Sparsity Detection
7980
function (alg::ApproximateJacobianSparsity)(
80-
ad::AutoSparsePolyesterForwardDiff, f::F, x; fx = nothing, kwargs...) where {F}
81+
ad::AutoSparse{<:AutoPolyesterForwardDiff}, f::F, x; fx = nothing, kwargs...) where {F}
8182
@unpack ntrials, rng = alg
8283
fx = fx === nothing ? f(x) : fx
8384
ck = __chunksize(ad, x)
@@ -94,7 +95,7 @@ function (alg::ApproximateJacobianSparsity)(
9495
end
9596

9697
function (alg::ApproximateJacobianSparsity)(
97-
ad::AutoSparsePolyesterForwardDiff, f::F, fx, x;
98+
ad::AutoSparse{<:AutoPolyesterForwardDiff}, f::F, fx, x;
9899
kwargs...) where {F}
99100
@unpack ntrials, rng = alg
100101
ck = __chunksize(ad, x)

ext/SparseDiffToolsSymbolicsExt.jl

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
module SparseDiffToolsSymbolicsExt
22

33
using SparseDiffTools, Symbolics
4-
import SparseDiffTools: AbstractSparseADType
4+
import SparseDiffTools: AutoSparse
55

6-
function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f, x; fx = nothing,
6+
function (alg::SymbolicsSparsityDetection)(ad::AutoSparse, f, x; fx = nothing,
77
kwargs...)
88
fx = fx === nothing ? similar(f(x)) : dx
99
f!(y, x) = (y .= f(x))
@@ -12,7 +12,7 @@ function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f, x; fx =
1212
return _alg(ad, f, x; fx, kwargs...)
1313
end
1414

15-
function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f!, fx, x; kwargs...)
15+
function (alg::SymbolicsSparsityDetection)(ad::AutoSparse, f!, fx, x; kwargs...)
1616
J = Symbolics.jacobian_sparsity(f!, fx, x)
1717
_alg = JacPrototypeSparsityDetection(J, alg.alg)
1818
return _alg(ad, f!, fx, x; kwargs...)

ext/SparseDiffToolsZygoteExt.jl

+9-6
Original file line numberDiff line numberDiff line change
@@ -11,25 +11,27 @@ import SparseDiffTools: numback_hesvec!,
1111
numback_hesvec, autoback_hesvec!, autoback_hesvec, auto_vecjac!,
1212
auto_vecjac
1313
import SparseDiffTools: __f̂, __jacobian!, __gradient, __gradient!
14-
import ADTypes: AutoZygote, AutoSparseZygote
14+
import ADTypes: AutoZygote, AutoSparse
1515

16-
@inline __test_backend_loaded(::Union{AutoSparseZygote, AutoZygote}) = nothing
16+
@inline __test_backend_loaded(::Union{AutoSparse{<:AutoZygote}, AutoZygote}) = nothing
1717

1818
## Satisfying High-Level Interface for Sparse Jacobians
19-
function __gradient(::Union{AutoSparseZygote, AutoZygote}, f::F, x, cols) where {F}
19+
function __gradient(::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f::F, x, cols) where {F}
2020
_, ∂x, _ = Zygote.gradient(__f̂, f, x, cols)
2121
return vec(∂x)
2222
end
2323

24-
function __gradient!(::Union{AutoSparseZygote, AutoZygote}, f!::F, fx, x, cols) where {F}
24+
function __gradient!(
25+
::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f!::F, fx, x, cols) where {F}
2526
return error("Zygote.jl cannot differentiate in-place (mutating) functions.")
2627
end
2728

2829
# Zygote doesn't provide a way to accumulate directly into `J`. So we modify the code from
2930
# https://github.com/FluxML/Zygote.jl/blob/82c7a000bae7fb0999275e62cc53ddb61aed94c7/src/lib/grad.jl#L140-L157C4
3031
import Zygote: _jvec, _eyelike, _gradcopy!
3132

32-
@views function __jacobian!(J::AbstractMatrix, ::Union{AutoSparseZygote, AutoZygote}, f::F,
33+
@views function __jacobian!(
34+
J::AbstractMatrix, ::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f::F,
3335
x) where {F}
3436
y, back = Zygote.pullback(_jvec ∘ f, x)
3537
δ = _eyelike(y)
@@ -40,7 +42,8 @@ import Zygote: _jvec, _eyelike, _gradcopy!
4042
return J
4143
end
4244

43-
function __jacobian!(_, ::Union{AutoSparseZygote, AutoZygote}, f!::F, fx, x) where {F}
45+
function __jacobian!(
46+
_, ::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f!::F, fx, x) where {F}
4447
return error("Zygote.jl cannot differentiate in-place (mutating) functions.")
4548
end
4649

src/SparseDiffTools.jl

+6-4
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,8 @@ import Graphs: SimpleGraph
99
# Differentiation
1010
using FiniteDiff, ForwardDiff
1111
@reexport using ADTypes
12-
import ADTypes: AbstractADType, AutoSparseZygote, AbstractSparseForwardMode,
13-
AbstractSparseReverseMode, AbstractSparseFiniteDifferences,
14-
AbstractReverseMode
12+
import ADTypes: AbstractADType, AutoSparse, ForwardMode, ForwardOrReverseMode, ReverseMode,
13+
SymbolicMode, mode
1514
import ForwardDiff: Dual, jacobian, partials, DEFAULT_CHUNK_THRESHOLD
1615
# Array Packages
1716
using ArrayInterface, SparseArrays
@@ -32,6 +31,9 @@ end
3231

3332
abstract type AbstractAutoDiffVecProd end
3433

34+
my_dense_ad(ad::AbstractADType) = ad
35+
my_dense_ad(ad::AutoSparse) = ADTypes.dense_ad(ad)
36+
3537
include("coloring/high_level.jl")
3638
include("coloring/backtracking_coloring.jl")
3739
include("coloring/contraction_coloring.jl")
@@ -52,6 +54,7 @@ include("highlevel/common.jl")
5254
include("highlevel/coloring.jl")
5355
include("highlevel/forward_mode.jl")
5456
include("highlevel/reverse_mode.jl")
57+
include("highlevel/forward_or_reverse_mode.jl")
5558
include("highlevel/finite_diff.jl")
5659

5760
Base.@pure __parameterless_type(T) = Base.typename(T).wrapper
@@ -90,7 +93,6 @@ export JacVec, HesVec, HesVecGrad, VecJac
9093
export update_coefficients, update_coefficients!, value!
9194

9295
# High Level Interface: sparse_jacobian
93-
export AutoSparseEnzyme
9496

9597
export NoSparsityDetection, SymbolicsSparsityDetection, JacPrototypeSparsityDetection,
9698
PrecomputedJacobianColorvec, ApproximateJacobianSparsity, AutoSparsityDetection

src/highlevel/coloring.jl

+12-12
Original file line numberDiff line numberDiff line change
@@ -14,17 +14,17 @@ struct NoMatrixColoring end
1414
(::AbstractMaybeSparsityDetection)(::AbstractADType, args...; kws...) = NoMatrixColoring()
1515

1616
# Prespecified Jacobian Structure
17-
function (alg::JacPrototypeSparsityDetection)(ad::AbstractSparseADType, args...; kwargs...)
17+
function (alg::JacPrototypeSparsityDetection)(ad::AutoSparse, args...; kwargs...)
1818
J = alg.jac_prototype
1919
colorvec = matrix_colors(J, alg.alg;
20-
partition_by_rows = ad isa AbstractSparseReverseMode)
20+
partition_by_rows = mode(ad) isa ReverseMode)
2121
(nz_rows, nz_cols) = ArrayInterface.findstructralnz(J)
2222
return MatrixColoringResult(colorvec, J, nz_rows, nz_cols)
2323
end
2424

2525
# Prespecified Colorvecs
26-
function (alg::PrecomputedJacobianColorvec)(ad::AbstractSparseADType, args...; kwargs...)
27-
colorvec = _get_colorvec(alg, ad)
26+
function (alg::PrecomputedJacobianColorvec)(ad::AutoSparse, args...; kwargs...)
27+
colorvec = _get_colorvec(alg, mode(ad))
2828
J = alg.jac_prototype
2929
(nz_rows, nz_cols) = ArrayInterface.findstructralnz(J)
3030
return MatrixColoringResult(colorvec, J, nz_rows, nz_cols)
@@ -33,10 +33,10 @@ end
3333
# Approximate Jacobian Sparsity Detection
3434
## Right now we hardcode it to use `ForwardDiff`
3535
function (alg::ApproximateJacobianSparsity)(
36-
ad::AbstractSparseADType, f::F, x; fx = nothing,
36+
ad::AutoSparse, f::F, x; fx = nothing,
3737
kwargs...) where {F}
38-
if !(ad isa AutoSparseForwardDiff)
39-
if ad isa AutoSparsePolyesterForwardDiff
38+
if !(ad isa AutoSparse{<:AutoForwardDiff})
39+
if ad isa AutoSparse{<:AutoPolyesterForwardDiff}
4040
@warn "$(ad) is only supported if `PolyesterForwardDiff` is explicitly loaded. Using ForwardDiff instead." maxlog=1
4141
else
4242
@warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
@@ -57,10 +57,10 @@ function (alg::ApproximateJacobianSparsity)(
5757
fx, kwargs...)
5858
end
5959

60-
function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, fx, x;
60+
function (alg::ApproximateJacobianSparsity)(ad::AutoSparse, f::F, fx, x;
6161
kwargs...) where {F}
62-
if !(ad isa AutoSparseForwardDiff)
63-
if ad isa AutoSparsePolyesterForwardDiff
62+
if !(ad isa AutoSparse{<:AutoForwardDiff})
63+
if ad isa AutoSparse{<:AutoPolyesterForwardDiff}
6464
@warn "$(ad) is only supported if `PolyesterForwardDiff` is explicitly loaded. Using ForwardDiff instead." maxlog=1
6565
else
6666
@warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
@@ -81,7 +81,7 @@ function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, fx,
8181
end
8282

8383
function (alg::ApproximateJacobianSparsity)(
84-
ad::AutoSparseFiniteDiff, f::F, x; fx = nothing,
84+
ad::AutoSparse{<:AutoFiniteDiff}, f::F, x; fx = nothing,
8585
kwargs...) where {F}
8686
@unpack ntrials, rng = alg
8787
fx = fx === nothing ? f(x) : fx
@@ -98,7 +98,7 @@ function (alg::ApproximateJacobianSparsity)(
9898
fx, kwargs...)
9999
end
100100

101-
function (alg::ApproximateJacobianSparsity)(ad::AutoSparseFiniteDiff, f!::F, fx, x;
101+
function (alg::ApproximateJacobianSparsity)(ad::AutoSparse{<:AutoFiniteDiff}, f!::F, fx, x;
102102
kwargs...) where {F}
103103
@unpack ntrials, rng = alg
104104
cache = FiniteDiff.JacobianCache(x, fx)

0 commit comments

Comments
 (0)