Reorganize the tests with CUDA, Enzyme and Zygote
amontoison committed Nov 26, 2024
1 parent e6eb171 commit 3a35293
Showing 9 changed files with 231 additions and 227 deletions.
30 changes: 28 additions & 2 deletions .buildkite/pipeline.yml
@@ -7,6 +7,32 @@ steps:
       queue: "juliagpu"
       cuda: "*"
     command: |
-      julia --color=yes --project -e 'using Pkg; Pkg.add("CUDA"); Pkg.add("NLPModels"); Pkg.add("NLPModelsTest"); Pkg.instantiate()'
-      julia --color=yes --project -e 'include("test/gpu.jl")'
+      julia --color=yes --project=test -e 'using Pkg; Pkg.add("CUDA"); Pkg.develop(path="."); Pkg.instantiate()'
+      julia --color=yes --project=test -e 'include("test/gpu.jl")'
     timeout_in_minutes: 30
+
+  - label: "CPUs -- Enzyme.jl"
+    plugins:
+      - JuliaCI/julia#v1:
+          version: "1.10"
+    agents:
+      queue: "juliaecosystem"
+      os: "linux"
+      arch: "x86_64"
+    command: |
+      julia --color=yes --project=test -e 'using Pkg; Pkg.add("Enzyme"); Pkg.develop(path="."); Pkg.instantiate()'
+      julia --color=yes --project=test -e 'include("test/enzyme.jl")'
+    timeout_in_minutes: 30
+
+  - label: "CPUs -- Zygote.jl"
+    plugins:
+      - JuliaCI/julia#v1:
+          version: "1.10"
+    agents:
+      queue: "juliaecosystem"
+      os: "linux"
+      arch: "x86_64"
+    command: |
+      julia --color=yes --project=test -e 'using Pkg; Pkg.add("Zygote"); Pkg.develop(path="."); Pkg.instantiate()'
+      julia --color=yes --project=test -e 'include("test/zygote.jl")'
+    timeout_in_minutes: 30
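
All three jobs now follow the same pattern: activate the test environment, install the optional AD backend for that job only, and develop the package under test into that environment. As the test/Project.toml diff below shows, CUDA, Enzyme and Zygote consequently drop out of the hard test dependencies. A minimal local equivalent of the Zygote job, as a sketch (run from the repository root):

# Reproduce the "CPUs -- Zygote.jl" job locally (a sketch built from the commands above).
using Pkg
Pkg.activate("test")       # use the test environment, not the package's own Project.toml
Pkg.add("Zygote")          # the optional backend is installed per job
Pkg.develop(path = ".")    # make the local ADNLPModels checkout available
Pkg.instantiate()
include("test/zygote.jl")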
6 changes: 0 additions & 6 deletions test/Project.toml
@@ -1,6 +1,4 @@
 [deps]
-CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
-Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 ManualNLPModels = "30dfa513-9b2f-4fb3-9796-781eabac1617"
@@ -11,16 +9,12 @@ ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [compat]
-CUDA = "4, 5"
-Enzyme = "0.10, 0.11, 0.12"
 ForwardDiff = "0.10"
 ManualNLPModels = "0.1"
 NLPModels = "0.21"
 NLPModelsModifiers = "0.7"
 NLPModelsTest = "0.10"
 ReverseDiff = "1"
 SparseMatrixColorings = "0.4.0"
-Zygote = "0.6"
83 changes: 83 additions & 0 deletions test/enzyme.jl
@@ -0,0 +1,83 @@
+using LinearAlgebra, SparseArrays, Test
+using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
+using ADNLPModels:
+  gradient, gradient!, jacobian, hessian, Jprod!, Jtprod!, directional_second_derivative, Hvprod!
+
+# Automatically loads the code for Enzyme with Requires
+import Enzyme
+
+#=
+ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
+names = OptimizationProblems.meta[!, :name]
+list_excluded_enzyme = [
+  "brybnd",
+  "clplatea",
+  "clplateb",
+  "clplatec",
+  "curly",
+  "curly10",
+  "curly20",
+  "curly30",
+  "elec",
+  "fminsrf2",
+  "hs101",
+  "hs117",
+  "hs119",
+  "hs86",
+  "integreq",
+  "ncb20",
+  "ncb20b",
+  "palmer1c",
+  "palmer1d",
+  "palmer2c",
+  "palmer3c",
+  "palmer4c",
+  "palmer5c",
+  "palmer5d",
+  "palmer6c",
+  "palmer7c",
+  "palmer8c",
+  "sbrybnd",
+  "tetra",
+  "tetra_duct12",
+  "tetra_duct15",
+  "tetra_duct20",
+  "tetra_foam5",
+  "tetra_gear",
+  "tetra_hook",
+  "threepk",
+  "triangle",
+  "triangle_deer",
+  "triangle_pacman",
+  "triangle_turtle",
+  "watson",
+]
+for pb in names
+  @info pb
+  (pb in list_excluded_enzyme) && continue
+  nlp = eval(Meta.parse(pb))(
+    gradient_backend = ADNLPModels.EnzymeADGradient,
+    jacobian_backend = ADNLPModels.EmptyADbackend,
+    hessian_backend = ADNLPModels.EmptyADbackend,
+  )
+  grad(nlp, get_x0(nlp))
+end
+=#
+
+#=
+ERROR: Duplicated Returns not yet handled
+Stacktrace:
+ [1] autodiff
+   @ .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:209 [inlined]
+ [2] autodiff(mode::EnzymeCore.ReverseMode, f::OptimizationProblems.ADNLPProblems.var"#f#254"{OptimizationProblems.ADNLPProblems.var"#f#250#255"}, args::Duplicated{Vector{Float64}})
+   @ Enzyme .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:248
+ [3] gradient!(#unused#::ADNLPModels.EnzymeADGradient, g::Vector{Float64}, f::Function, x::Vector{Float64})
+   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\enzyme.jl:17
+ [4] grad!(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}, g::Vector{Float64})
+   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\nlp.jl:542
+ [5] grad(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64})
+   @ NLPModels .julia\packages\NLPModels\XBcWL\src\nlp\api.jl:31
+ [6] top-level scope
+   @ .\REPL[7]:5
+=#
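
Per the comment in the new file, the `import Enzyme` line is what triggers ADNLPModels to load its Enzyme backend through Requires.jl. A sketch of what such a hook conventionally looks like inside the package module (assumed shape, not the actual package source; the UUID comes from the old test/Project.toml above):

# Hypothetical Requires.jl hook inside ADNLPModels' __init__ (sketch).
using Requires
function __init__()
  @require Enzyme="7da242da-08ed-463a-9acd-ee780be4f1d9" include("enzyme.jl")
end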
17 changes: 0 additions & 17 deletions test/nlp/nlpmodelstest.jl
@@ -18,23 +18,6 @@
   @testset "Check multiple precision" begin
     multiple_precision_nlp(nlp_from_T, exclude = [], linear_api = true)
   end
-  @testset "Check multiple precision GPU" begin
-    if CUDA.functional()
-      CUDA.allowscalar() do
-        # sparse Jacobian/Hessian doesn't work here
-        multiple_precision_nlp_array(
-          T -> nlp_from_T(
-            T;
-            jacobian_backend = ADNLPModels.ForwardDiffADJacobian,
-            hessian_backend = ADNLPModels.ForwardDiffADHessian,
-          ),
-          CuArray,
-          exclude = [jth_hprod, hprod, jprod],
-          linear_api = true,
-        )
-      end
-    end
-  end
   @testset "Check view subarray" begin
     view_subarray_nlp(nlp_ad, exclude = [])
   end
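
The GPU-only multiple-precision check deleted here (and its NLS counterpart in the next file) is not lost: the CUDA job in the pipeline above now runs test/gpu.jl in an environment where CUDA is installed explicitly. A sketch of how the moved check would be guarded there (assumed file layout; `nlp_from_T` comes from the surrounding test harness):

# Sketch of the relocated GPU check (assumed contents of test/gpu.jl).
using CUDA, NLPModelsTest
if CUDA.functional()       # skip cleanly on machines without a working GPU
  CUDA.allowscalar() do    # the dense ForwardDiff backends fall back to scalar indexing
    # multiple_precision_nlp_array / multiple_precision_nls_array calls as deleted above
  end
end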
19 changes: 0 additions & 19 deletions test/nls/nlpmodelstest.jl
@@ -35,25 +35,6 @@
   @testset "Check multiple precision" begin
     multiple_precision_nls(nls_from_T, exclude = exclude, linear_api = true)
   end
-  @testset "Check multiple precision GPU" begin
-    if CUDA.functional()
-      CUDA.allowscalar() do
-        # sparse Jacobian/Hessian doesn't work here
-        multiple_precision_nls_array(
-          T -> nls_from_T(
-            T;
-            jacobian_backend = ADNLPModels.ForwardDiffADJacobian,
-            hessian_backend = ADNLPModels.ForwardDiffADHessian,
-            jacobian_residual_backend = ADNLPModels.ForwardDiffADJacobian,
-            hessian_residual_backend = ADNLPModels.ForwardDiffADHessian,
-          ),
-          CuArray,
-          exclude = [jprod, jprod_residual, hprod_residual],
-          linear_api = true,
-        )
-      end
-    end
-  end
   @testset "Check view subarray" begin
     view_subarray_nls.(nlss, exclude = exclude)
   end
106 changes: 2 additions & 104 deletions test/runtests.jl
@@ -1,4 +1,4 @@
-using CUDA, LinearAlgebra, SparseArrays, Test
+using LinearAlgebra, SparseArrays, Test
 using SparseMatrixColorings
 using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
 using ADNLPModels:
@@ -40,109 +40,7 @@ for problem in NLPModelsTest.nls_problems
   include("nls/problems/$(lowercase(problem)).jl")
 end
 
-# Additional backends used for tests
-push!(
-  ADNLPModels.predefined_backend,
-  :zygote_backend => Dict(
-    :gradient_backend => ADNLPModels.ZygoteADGradient,
-    :jprod_backend => ADNLPModels.ZygoteADJprod,
-    :jtprod_backend => ADNLPModels.ZygoteADJtprod,
-    :hprod_backend => ADNLPModels.ForwardDiffADHvprod,
-    :jacobian_backend => ADNLPModels.ZygoteADJacobian,
-    :hessian_backend => ADNLPModels.ZygoteADHessian,
-    :ghjvprod_backend => ADNLPModels.ForwardDiffADGHjvprod,
-    :jprod_residual_backend => ADNLPModels.ZygoteADJprod,
-    :jtprod_residual_backend => ADNLPModels.ZygoteADJtprod,
-    :hprod_residual_backend => ADNLPModels.ForwardDiffADHvprod,
-    :jacobian_residual_backend => ADNLPModels.ZygoteADJacobian,
-    :hessian_residual_backend => ADNLPModels.ZygoteADHessian,
-  ),
-)
-
-ReverseDiffAD(nvar, f) = ADNLPModels.ADModelBackend(
-  nvar,
-  f,
-  gradient_backend = ADNLPModels.ReverseDiffADGradient,
-  hprod_backend = ADNLPModels.ReverseDiffADHvprod,
-  jprod_backend = ADNLPModels.ReverseDiffADJprod,
-  jtprod_backend = ADNLPModels.ReverseDiffADJtprod,
-  jacobian_backend = ADNLPModels.ReverseDiffADJacobian,
-  hessian_backend = ADNLPModels.ReverseDiffADHessian,
-)
-
-function test_getter_setter(nlp)
-  @test get_adbackend(nlp) == nlp.adbackend
-  if typeof(nlp) <: ADNLPModel
-    set_adbackend!(nlp, ReverseDiffAD(nlp.meta.nvar, nlp.f))
-  elseif typeof(nlp) <: ADNLSModel
-    function F(x; nequ = nlp.nls_meta.nequ)
-      Fx = similar(x, nequ)
-      nlp.F!(Fx, x)
-      return Fx
-    end
-    set_adbackend!(nlp, ReverseDiffAD(nlp.meta.nvar, x -> sum(F(x) .^ 2)))
-  end
-  @test typeof(get_adbackend(nlp).gradient_backend) <: ADNLPModels.ReverseDiffADGradient
-  @test typeof(get_adbackend(nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
-  @test typeof(get_adbackend(nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
-  set_adbackend!(
-    nlp,
-    gradient_backend = ADNLPModels.ForwardDiffADGradient,
-    jtprod_backend = ADNLPModels.GenericForwardDiffADJtprod(),
-  )
-  @test typeof(get_adbackend(nlp).gradient_backend) <: ADNLPModels.ForwardDiffADGradient
-  @test typeof(get_adbackend(nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
-  @test typeof(get_adbackend(nlp).jtprod_backend) <: ADNLPModels.GenericForwardDiffADJtprod
-  @test typeof(get_adbackend(nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
-end
-
-ZygoteAD() = ADNLPModels.ADModelBackend(
-  ADNLPModels.ZygoteADGradient(),
-  ADNLPModels.GenericForwardDiffADHvprod(),
-  ADNLPModels.ZygoteADJprod(),
-  ADNLPModels.ZygoteADJtprod(),
-  ADNLPModels.ZygoteADJacobian(0),
-  ADNLPModels.ZygoteADHessian(0),
-  ADNLPModels.ForwardDiffADGHjvprod(),
-  ADNLPModels.EmptyADbackend(),
-  ADNLPModels.EmptyADbackend(),
-  ADNLPModels.EmptyADbackend(),
-  ADNLPModels.EmptyADbackend(),
-  ADNLPModels.EmptyADbackend(),
-)
-
-function test_autodiff_backend_error()
-  @testset "Error without loading package - $backend" for backend in [:ZygoteAD]
-    adbackend = eval(backend)()
-    @test_throws ArgumentError gradient(adbackend.gradient_backend, sum, [1.0])
-    @test_throws ArgumentError gradient!(adbackend.gradient_backend, [1.0], sum, [1.0])
-    @test_throws ArgumentError jacobian(adbackend.jacobian_backend, identity, [1.0])
-    @test_throws ArgumentError hessian(adbackend.hessian_backend, sum, [1.0])
-    @test_throws ArgumentError Jprod!(
-      adbackend.jprod_backend,
-      [1.0],
-      [1.0],
-      identity,
-      [1.0],
-      Val(:c),
-    )
-    @test_throws ArgumentError Jtprod!(
-      adbackend.jtprod_backend,
-      [1.0],
-      [1.0],
-      identity,
-      [1.0],
-      Val(:c),
-    )
-  end
-end
-
-# Test the argument error without loading the packages
-test_autodiff_backend_error()
-
-# Automatically loads the code for Zygote with Requires
-import Zygote
-
 include("utils.jl")
 include("nlp/basic.jl")
 include("nls/basic.jl")
 include("nlp/nlpmodelstest.jl")
79 changes: 0 additions & 79 deletions test/script_OP.jl
@@ -1,8 +1,5 @@
 # script that tests ADNLPModels over OptimizationProblems.jl problems
 
-# optional deps
-# using Enzyme
-
 # AD deps
 using ForwardDiff, ReverseDiff
 
@@ -55,79 +52,3 @@ for pb in names
     continue
   end
 end
-
-#=
-ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
-names = OptimizationProblems.meta[!, :name]
-list_excluded_enzyme = [
-  "brybnd",
-  "clplatea",
-  "clplateb",
-  "clplatec",
-  "curly",
-  "curly10",
-  "curly20",
-  "curly30",
-  "elec",
-  "fminsrf2",
-  "hs101",
-  "hs117",
-  "hs119",
-  "hs86",
-  "integreq",
-  "ncb20",
-  "ncb20b",
-  "palmer1c",
-  "palmer1d",
-  "palmer2c",
-  "palmer3c",
-  "palmer4c",
-  "palmer5c",
-  "palmer5d",
-  "palmer6c",
-  "palmer7c",
-  "palmer8c",
-  "sbrybnd",
-  "tetra",
-  "tetra_duct12",
-  "tetra_duct15",
-  "tetra_duct20",
-  "tetra_foam5",
-  "tetra_gear",
-  "tetra_hook",
-  "threepk",
-  "triangle",
-  "triangle_deer",
-  "triangle_pacman",
-  "triangle_turtle",
-  "watson",
-]
-for pb in names
-  @info pb
-  (pb in list_excluded_enzyme) && continue
-  nlp = eval(Meta.parse(pb))(
-    gradient_backend = ADNLPModels.EnzymeADGradient,
-    jacobian_backend = ADNLPModels.EmptyADbackend,
-    hessian_backend = ADNLPModels.EmptyADbackend,
-  )
-  grad(nlp, get_x0(nlp))
-end
-=#
-
-#=
-ERROR: Duplicated Returns not yet handled
-Stacktrace:
- [1] autodiff
-   @ .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:209 [inlined]
- [2] autodiff(mode::EnzymeCore.ReverseMode, f::OptimizationProblems.ADNLPProblems.var"#f#254"{OptimizationProblems.ADNLPProblems.var"#f#250#255"}, args::Duplicated{Vector{Float64}})
-   @ Enzyme .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:248
- [3] gradient!(#unused#::ADNLPModels.EnzymeADGradient, g::Vector{Float64}, f::Function, x::Vector{Float64})
-   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\enzyme.jl:17
- [4] grad!(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}, g::Vector{Float64})
-   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\nlp.jl:542
- [5] grad(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64})
-   @ NLPModels .julia\packages\NLPModels\XBcWL\src\nlp\api.jl:31
- [6] top-level scope
-   @ .\REPL[7]:5
-=#
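
Note that the commented-out Enzyme exclusion list and the "Duplicated Returns" stack trace removed here are the same blocks that reappear verbatim in the new test/enzyme.jl above, so this script now keeps only its ForwardDiff/ReverseDiff sweep.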
[Diffs for the two remaining changed files were not loaded.]