Skip to content

Commit

Permalink
Upgrade ADTypes to v1
Browse files Browse the repository at this point in the history
  • Loading branch information
gdalle committed Apr 22, 2024
1 parent f537806 commit dbd8a73
Show file tree
Hide file tree
Showing 18 changed files with 81 additions and 88 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
- InterfaceI
version:
- '1' # Latest Release
- '1.6' # Current LTS
- '1.10' # future LTS
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v2
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/Downstream.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ jobs:
strategy:
fail-fast: false
matrix:
julia-version: [1,1.6]
julia-version: [1,1.10]
os: [ubuntu-latest]
package:
- {user: SciML, repo: OrdinaryDiffEq.jl, group: InterfaceII}
Expand Down
6 changes: 3 additions & 3 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "SparseDiffTools"
uuid = "47a9eef4-7e08-11e9-0b38-333d64bd3804"
authors = ["Pankaj Mishra <[email protected]>", "Chris Rackauckas <[email protected]>"]
version = "2.18.0"
version = "2.19.0"

[deps]
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
Expand Down Expand Up @@ -40,7 +40,7 @@ SparseDiffToolsSymbolicsExt = "Symbolics"
SparseDiffToolsZygoteExt = "Zygote"

[compat]
ADTypes = "0.2.6"
ADTypes = "1.0.0"
Adapt = "3, 4"
ArrayInterface = "7.4.2"
Compat = "4"
Expand All @@ -65,7 +65,7 @@ Tricks = "0.1.6"
UnPack = "1"
VertexSafeGraphs = "0.2"
Zygote = "0.6"
julia = "1.6"
julia = "1.10"

[extras]
BandedMatrices = "aae01518-5342-5314-be14-df237901396f"
Expand Down
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,8 @@ We need to perform the following steps to utilize SparseDiffTools:
the sparsity pattern. (Note that `Symbolics.jl` must be explicitly loaded before
using this functionality.)
2. Now choose an AD backend from `ADTypes.jl`:
1. If using a Non `*Sparse*` type, then we will not use sparsity detection.
2. All other sparse AD types will internally compute the proper sparsity pattern, and
1. If using a standard type like `AutoForwardDiff()`, then we will not use sparsity detection.
2. If you wrap it inside `AutoSparse(AutoForwardDiff())`, then we will internally compute the proper sparsity pattern, and
try to exploit that.
3. Now there are 2 options:
1. Precompute the cache using `sparse_jacobian_cache` and use the `sparse_jacobian` or
Expand All @@ -73,7 +73,7 @@ We need to perform the following steps to utilize SparseDiffTools:
using Symbolics

sd = SymbolicsSparsityDetection()
adtype = AutoSparseFiniteDiff()
adtype = AutoSparse(AutoFiniteDiff())
x = rand(30)
y = similar(x)

Expand Down
2 changes: 1 addition & 1 deletion docs/src/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ We need to perform the following steps to utilize SparseDiffTools:
using Symbolics

sd = SymbolicsSparsityDetection()
adtype = AutoSparseFiniteDiff()
adtype = AutoSparse(AutoFiniteDiff())
x = rand(30)
y = similar(x)

Expand Down
16 changes: 8 additions & 8 deletions ext/SparseDiffToolsEnzymeExt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,36 +2,36 @@ module SparseDiffToolsEnzymeExt

import ArrayInterface: fast_scalar_indexing
import SparseDiffTools: __f̂, __maybe_copy_x, __jacobian!, __gradient, __gradient!,
AutoSparseEnzyme, __test_backend_loaded
AutoSparse, __test_backend_loaded
# FIXME: For Enzyme we currently assume reverse mode
import ADTypes: AutoEnzyme
import ADTypes: AutoSparse, AutoEnzyme
using Enzyme

using ForwardDiff

@inline __test_backend_loaded(::Union{AutoSparseEnzyme, AutoEnzyme}) = nothing
@inline __test_backend_loaded(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}) = nothing

## Satisfying High-Level Interface for Sparse Jacobians
function __gradient(::Union{AutoSparseEnzyme, AutoEnzyme}, f, x, cols)
function __gradient(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f, x, cols)
dx = zero(x)
autodiff(Reverse, __f̂, Const(f), Duplicated(x, dx), Const(cols))
return vec(dx)
end

function __gradient!(::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x, cols)
function __gradient!(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f!, fx, x, cols)
dx = zero(x)
dfx = zero(fx)
autodiff(Reverse, __f̂, Active, Const(f!), Duplicated(fx, dfx), Duplicated(x, dx),
Const(cols))
return dx
end

function __jacobian!(J::AbstractMatrix, ::Union{AutoSparseEnzyme, AutoEnzyme}, f, x)
function __jacobian!(J::AbstractMatrix, ::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f, x)
J .= jacobian(Reverse, f, x, Val(size(J, 1)))
return J
end

@views function __jacobian!(J, ad::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x)
@views function __jacobian!(J, ad::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f!, fx, x)
# This version is slowish not sure how to do jacobians for inplace functions
@warn "Current code for computing jacobian for inplace functions in Enzyme is slow." maxlog=1
dfx = zero(fx)
Expand All @@ -58,6 +58,6 @@ end
return J
end

__maybe_copy_x(::Union{AutoSparseEnzyme, AutoEnzyme}, x::SubArray) = copy(x)
__maybe_copy_x(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, x::SubArray) = copy(x)

end
8 changes: 4 additions & 4 deletions ext/SparseDiffToolsPolyesterForwardDiffExt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ struct PolyesterForwardDiffJacobianCache{CO, CA, J, FX, X} <:
end

function sparse_jacobian_cache(
ad::Union{AutoSparsePolyesterForwardDiff, AutoPolyesterForwardDiff},
ad::Union{AutoSparse{<:AutoPolyesterForwardDiff}, AutoPolyesterForwardDiff},
sd::AbstractMaybeSparsityDetection, f::F, x; fx = nothing) where {F}
coloring_result = sd(ad, f, x)
fx = fx === nothing ? similar(f(x)) : fx
Expand All @@ -36,7 +36,7 @@ function sparse_jacobian_cache(
end

function sparse_jacobian_cache(
ad::Union{AutoSparsePolyesterForwardDiff, AutoPolyesterForwardDiff},
ad::Union{AutoSparse{<:AutoPolyesterForwardDiff}, AutoPolyesterForwardDiff},
sd::AbstractMaybeSparsityDetection, f!::F, fx, x) where {F}
coloring_result = sd(ad, f!, fx, x)
if coloring_result isa NoMatrixColoring
Expand Down Expand Up @@ -77,7 +77,7 @@ end

## Approximate Sparsity Detection
function (alg::ApproximateJacobianSparsity)(
ad::AutoSparsePolyesterForwardDiff, f::F, x; fx = nothing, kwargs...) where {F}
ad::AutoSparse{<:AutoPolyesterForwardDiff}, f::F, x; fx = nothing, kwargs...) where {F}
@unpack ntrials, rng = alg
fx = fx === nothing ? f(x) : fx
ck = __chunksize(ad, x)
Expand All @@ -94,7 +94,7 @@ function (alg::ApproximateJacobianSparsity)(
end

function (alg::ApproximateJacobianSparsity)(
ad::AutoSparsePolyesterForwardDiff, f::F, fx, x;
ad::AutoSparse{<:AutoPolyesterForwardDiff}, f::F, fx, x;
kwargs...) where {F}
@unpack ntrials, rng = alg
ck = __chunksize(ad, x)
Expand Down
6 changes: 3 additions & 3 deletions ext/SparseDiffToolsSymbolicsExt.jl
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
module SparseDiffToolsSymbolicsExt

using SparseDiffTools, Symbolics
import SparseDiffTools: AbstractSparseADType
import SparseDiffTools: AutoSparse

function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f, x; fx = nothing,
function (alg::SymbolicsSparsityDetection)(ad::AutoSparse, f, x; fx = nothing,
kwargs...)
fx = fx === nothing ? similar(f(x)) : fx
f!(y, x) = (y .= f(x))
Expand All @@ -12,7 +12,7 @@ function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f, x; fx =
return _alg(ad, f, x; fx, kwargs...)
end

function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f!, fx, x; kwargs...)
function (alg::SymbolicsSparsityDetection)(ad::AutoSparse, f!, fx, x; kwargs...)
J = Symbolics.jacobian_sparsity(f!, fx, x)
_alg = JacPrototypeSparsityDetection(J, alg.alg)
return _alg(ad, f!, fx, x; kwargs...)
Expand Down
12 changes: 6 additions & 6 deletions ext/SparseDiffToolsZygoteExt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,25 +11,25 @@ import SparseDiffTools: numback_hesvec!,
numback_hesvec, autoback_hesvec!, autoback_hesvec, auto_vecjac!,
auto_vecjac
import SparseDiffTools: __f̂, __jacobian!, __gradient, __gradient!
import ADTypes: AutoZygote, AutoSparseZygote
import ADTypes: AutoZygote, AutoSparse

@inline __test_backend_loaded(::Union{AutoSparseZygote, AutoZygote}) = nothing
@inline __test_backend_loaded(::Union{AutoSparse{<:AutoZygote}, AutoZygote}) = nothing

## Satisfying High-Level Interface for Sparse Jacobians
function __gradient(::Union{AutoSparseZygote, AutoZygote}, f::F, x, cols) where {F}
function __gradient(::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f::F, x, cols) where {F}
_, ∂x, _ = Zygote.gradient(__f̂, f, x, cols)
return vec(∂x)
end

function __gradient!(::Union{AutoSparseZygote, AutoZygote}, f!::F, fx, x, cols) where {F}
function __gradient!(::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f!::F, fx, x, cols) where {F}
return error("Zygote.jl cannot differentiate in-place (mutating) functions.")
end

# Zygote doesn't provide a way to accumulate directly into `J`. So we modify the code from
# https://github.com/FluxML/Zygote.jl/blob/82c7a000bae7fb0999275e62cc53ddb61aed94c7/src/lib/grad.jl#L140-L157C4
import Zygote: _jvec, _eyelike, _gradcopy!

@views function __jacobian!(J::AbstractMatrix, ::Union{AutoSparseZygote, AutoZygote}, f::F,
@views function __jacobian!(J::AbstractMatrix, ::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f::F,
x) where {F}
y, back = Zygote.pullback(_jvec ∘ f, x)
δ = _eyelike(y)
Expand All @@ -40,7 +40,7 @@ import Zygote: _jvec, _eyelike, _gradcopy!
return J
end

function __jacobian!(_, ::Union{AutoSparseZygote, AutoZygote}, f!::F, fx, x) where {F}
function __jacobian!(_, ::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f!::F, fx, x) where {F}
return error("Zygote.jl cannot differentiate in-place (mutating) functions.")
end

Expand Down
6 changes: 2 additions & 4 deletions src/SparseDiffTools.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,8 @@ import Graphs: SimpleGraph
# Differentiation
using FiniteDiff, ForwardDiff
@reexport using ADTypes
import ADTypes: AbstractADType, AutoSparseZygote, AbstractSparseForwardMode,
AbstractSparseReverseMode, AbstractSparseFiniteDifferences,
AbstractReverseMode
import ADTypes: AbstractADType, AutoSparse, ForwardMode, ForwardOrReverseMode, ReverseMode,
SymbolicMode, mode
import ForwardDiff: Dual, jacobian, partials, DEFAULT_CHUNK_THRESHOLD
# Array Packages
using ArrayInterface, SparseArrays
Expand Down Expand Up @@ -90,7 +89,6 @@ export JacVec, HesVec, HesVecGrad, VecJac
export update_coefficients, update_coefficients!, value!

# High Level Interface: sparse_jacobian
export AutoSparseEnzyme

export NoSparsityDetection, SymbolicsSparsityDetection, JacPrototypeSparsityDetection,
PrecomputedJacobianColorvec, ApproximateJacobianSparsity, AutoSparsityDetection
Expand Down
22 changes: 11 additions & 11 deletions src/highlevel/coloring.jl
Original file line number Diff line number Diff line change
Expand Up @@ -14,16 +14,16 @@ struct NoMatrixColoring end
(::AbstractMaybeSparsityDetection)(::AbstractADType, args...; kws...) = NoMatrixColoring()

# Prespecified Jacobian Structure
function (alg::JacPrototypeSparsityDetection)(ad::AbstractSparseADType, args...; kwargs...)
function (alg::JacPrototypeSparsityDetection)(ad::AutoSparse, args...; kwargs...)
J = alg.jac_prototype
colorvec = matrix_colors(J, alg.alg;
partition_by_rows = ad isa AbstractSparseReverseMode)
partition_by_rows = mode(ad) isa ReverseMode)
(nz_rows, nz_cols) = ArrayInterface.findstructralnz(J)
return MatrixColoringResult(colorvec, J, nz_rows, nz_cols)
end

# Prespecified Colorvecs
function (alg::PrecomputedJacobianColorvec)(ad::AbstractSparseADType, args...; kwargs...)
function (alg::PrecomputedJacobianColorvec)(ad::AutoSparse, args...; kwargs...)
colorvec = _get_colorvec(alg, ad)
J = alg.jac_prototype
(nz_rows, nz_cols) = ArrayInterface.findstructralnz(J)
Expand All @@ -33,10 +33,10 @@ end
# Approximate Jacobian Sparsity Detection
## Right now we hardcode it to use `ForwardDiff`
function (alg::ApproximateJacobianSparsity)(
ad::AbstractSparseADType, f::F, x; fx = nothing,
ad::AutoSparse, f::F, x; fx = nothing,
kwargs...) where {F}
if !(ad isa AutoSparseForwardDiff)
if ad isa AutoSparsePolyesterForwardDiff
if !(ad isa AutoSparse{<:AutoForwardDiff})
if ad isa AutoSparse{<:AutoPolyesterForwardDiff}
@warn "$(ad) is only supported if `PolyesterForwardDiff` is explicitly loaded. Using ForwardDiff instead." maxlog=1
else
@warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
Expand All @@ -57,10 +57,10 @@ function (alg::ApproximateJacobianSparsity)(
fx, kwargs...)
end

function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, fx, x;
function (alg::ApproximateJacobianSparsity)(ad::AutoSparse, f::F, fx, x;
kwargs...) where {F}
if !(ad isa AutoSparseForwardDiff)
if ad isa AutoSparsePolyesterForwardDiff
if !(ad isa AutoSparse{<:AutoForwardDiff})
if ad isa AutoSparse{<:AutoPolyesterForwardDiff}
@warn "$(ad) is only supported if `PolyesterForwardDiff` is explicitly loaded. Using ForwardDiff instead." maxlog=1
else
@warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
Expand All @@ -81,7 +81,7 @@ function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, fx,
end

function (alg::ApproximateJacobianSparsity)(
ad::AutoSparseFiniteDiff, f::F, x; fx = nothing,
ad::AutoSparse{<:AutoFiniteDiff}, f::F, x; fx = nothing,
kwargs...) where {F}
@unpack ntrials, rng = alg
fx = fx === nothing ? f(x) : fx
Expand All @@ -98,7 +98,7 @@ function (alg::ApproximateJacobianSparsity)(
fx, kwargs...)
end

function (alg::ApproximateJacobianSparsity)(ad::AutoSparseFiniteDiff, f!::F, fx, x;
function (alg::ApproximateJacobianSparsity)(ad::AutoSparse{<:AutoFiniteDiff}, f!::F, fx, x;
kwargs...) where {F}
@unpack ntrials, rng = alg
cache = FiniteDiff.JacobianCache(x, fx)
Expand Down
25 changes: 10 additions & 15 deletions src/highlevel/common.jl
Original file line number Diff line number Diff line change
@@ -1,8 +1,3 @@
const AbstractSparseADType = Union{AbstractSparseForwardMode, AbstractSparseReverseMode,
AbstractSparseFiniteDifferences}

struct AutoSparseEnzyme <: AbstractSparseReverseMode end

# Sparsity Detection
abstract type AbstractMaybeSparsityDetection end
abstract type AbstractSparsityDetection <: AbstractMaybeSparsityDetection end
Expand Down Expand Up @@ -106,7 +101,7 @@ function _get_colorvec(alg::PrecomputedJacobianColorvec, ad)
return cvec
end

function _get_colorvec(alg::PrecomputedJacobianColorvec, ::AbstractReverseMode)
function _get_colorvec(alg::PrecomputedJacobianColorvec, ::ReverseMode)
cvec = alg.row_colorvec
@assert cvec!==nothing "`row_colorvec` is nothing, but Reverse Mode AD is being used!"
return cvec
Expand Down Expand Up @@ -270,8 +265,8 @@ const __init_𝒥 = init_jacobian

# Misc Functions
function __chunksize(
::Union{AutoSparseForwardDiff{C}, AutoForwardDiff{C},
AutoSparsePolyesterForwardDiff{C}, AutoPolyesterForwardDiff{C}},
::Union{AutoSparse{<:AutoForwardDiff{C}}, AutoForwardDiff{C},
AutoSparse{<:AutoPolyesterForwardDiff{C}}, AutoPolyesterForwardDiff{C}},
x) where {C}
C isa ForwardDiff.Chunk && return C
return __chunksize(Val(C), x)
Expand All @@ -288,8 +283,8 @@ end
__chunksize(x) = ForwardDiff.Chunk(x)
__chunksize(x::StaticArray) = ForwardDiff.Chunk{ForwardDiff.pickchunksize(prod(Size(x)))}()

function __chunksize(::Union{AutoSparseForwardDiff{C}, AutoForwardDiff{C},
AutoSparsePolyesterForwardDiff{C}, AutoPolyesterForwardDiff{C}}) where {C}
function __chunksize(::Union{AutoSparse{<:AutoForwardDiff{C}}, AutoForwardDiff{C},
AutoSparse{<:AutoPolyesterForwardDiff{C}}, AutoPolyesterForwardDiff{C}}) where {C}
C === nothing && return nothing
C isa Integer && !(C isa Bool) && return C ≤ 0 ? nothing : Val(C)
return nothing
Expand Down Expand Up @@ -348,8 +343,8 @@ __maybe_copy_x(_, ::Nothing) = nothing
end

@inline __backend(ad) = nothing
@inline __backend(::Union{AutoEnzyme, AutoSparseEnzyme}) = :Enzyme
@inline __backend(::Union{AutoZygote, AutoSparseZygote}) = :Zygote
@inline __backend(::Union{AutoForwardDiff, AutoSparseForwardDiff}) = :ForwardDiff
@inline __backend(::Union{AutoPolyesterForwardDiff, AutoSparsePolyesterForwardDiff}) = :PolyesterForwardDiff
@inline __backend(::Union{AutoFiniteDiff, AutoSparseFiniteDiff}) = :FiniteDiff
@inline __backend(::Union{AutoEnzyme, AutoSparse{<:AutoEnzyme}}) = :Enzyme
@inline __backend(::Union{AutoZygote, AutoSparse{<:AutoZygote}}) = :Zygote
@inline __backend(::Union{AutoForwardDiff, AutoSparse{<:AutoForwardDiff}}) = :ForwardDiff
@inline __backend(::Union{AutoPolyesterForwardDiff, AutoSparse{<:AutoPolyesterForwardDiff}}) = :PolyesterForwardDiff
@inline __backend(::Union{AutoFiniteDiff, AutoSparse{<:AutoFiniteDiff}}) = :FiniteDiff
4 changes: 2 additions & 2 deletions src/highlevel/finite_diff.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ end

__getfield(c::FiniteDiffJacobianCache, ::Val{:jac_prototype}) = c.jac_prototype

function sparse_jacobian_cache(fd::Union{AutoSparseFiniteDiff, AutoFiniteDiff},
function sparse_jacobian_cache(fd::Union{AutoSparse{<:AutoFiniteDiff}, AutoFiniteDiff},
sd::AbstractMaybeSparsityDetection, f::F, x; fx = nothing) where {F}
coloring_result = sd(fd, f, x)
fx = fx === nothing ? similar(f(x)) : fx
Expand All @@ -23,7 +23,7 @@ function sparse_jacobian_cache(fd::Union{AutoSparseFiniteDiff, AutoFiniteDiff},
return FiniteDiffJacobianCache(coloring_result, cache, jac_prototype, fx, x)
end

function sparse_jacobian_cache(fd::Union{AutoSparseFiniteDiff, AutoFiniteDiff},
function sparse_jacobian_cache(fd::Union{AutoSparse{<:AutoFiniteDiff}, AutoFiniteDiff},
sd::AbstractMaybeSparsityDetection, f!::F, fx, x) where {F}
coloring_result = sd(fd, f!, fx, x)
if coloring_result isa NoMatrixColoring
Expand Down
4 changes: 2 additions & 2 deletions src/highlevel/forward_mode.jl
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ __standard_tag(::Nothing, f::F, x) where {F} = ForwardDiff.Tag(f, eltype(x))
__standard_tag(tag::ForwardDiff.Tag, ::F, _) where {F} = tag
__standard_tag(tag, f::F, x) where {F} = ForwardDiff.Tag(f, eltype(x))

function sparse_jacobian_cache(ad::Union{AutoSparseForwardDiff, AutoForwardDiff},
function sparse_jacobian_cache(ad::Union{AutoSparse{<:AutoForwardDiff}, AutoForwardDiff},
sd::AbstractMaybeSparsityDetection, f::F, x; fx = nothing) where {F}
coloring_result = sd(ad, f, x)
fx = fx === nothing ? similar(f(x)) : fx
Expand All @@ -29,7 +29,7 @@ function sparse_jacobian_cache(ad::Union{AutoSparseForwardDiff, AutoForwardDiff}
return ForwardDiffJacobianCache(coloring_result, cache, jac_prototype, fx, x)
end

function sparse_jacobian_cache(ad::Union{AutoSparseForwardDiff, AutoForwardDiff},
function sparse_jacobian_cache(ad::Union{AutoSparse{<:AutoForwardDiff}, AutoForwardDiff},
sd::AbstractMaybeSparsityDetection, f!::F, fx, x) where {F}
coloring_result = sd(ad, f!, fx, x)
tag = __standard_tag(ad.tag, f!, x)
Expand Down
Loading

0 comments on commit dbd8a73

Please sign in to comment.