Skip to content

Commit

Permalink
test/runtests.jl: Tests for CUDAext to have full coverage.
Browse files Browse the repository at this point in the history
  • Loading branch information
mashu committed Nov 23, 2024
1 parent c65d81b commit fa4940b
Showing 1 changed file with 34 additions and 1 deletion.
35 changes: 34 additions & 1 deletion test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,39 @@ using PositionalEmbeddings
"""
    has_working_cuda() -> Bool

Return `true` only when a CUDA device is detected *and* the CUDA runtime is
functional, so GPU test sets can be skipped gracefully on CPU-only machines.
"""
has_working_cuda() = CUDA.has_cuda() && CUDA.functional()

@testset "CUDA Extension Tests" begin
    # Only run the GPU tests when a CUDA device is present and functional.
    if has_working_cuda()
        @testset "RoPE CUDA conversion" begin
            features = 16
            seq_len = 32
            rope_cpu = RoPE(features, seq_len)

            # Move the RoPE struct (and its cached cos/sin tables) to the GPU.
            rope_gpu = cu(rope_cpu)

            # Conversion must preserve the wrapper type while relocating the
            # cached tables into device memory.
            @test rope_gpu isa RoPE
            @test rope_gpu.cos_cached isa CuArray
            @test rope_gpu.sin_cached isa CuArray

            # Values must survive the CPU -> GPU -> CPU round trip.
            # NOTE: `≈` (isapprox) is required here — a bare juxtaposition of
            # the two expressions is not a valid `@test` comparison.
            @test Array(rope_gpu.cos_cached) ≈ rope_cpu.cos_cached
            @test Array(rope_gpu.sin_cached) ≈ rope_cpu.sin_cached

            # Scalar fields are untouched by the device conversion.
            @test rope_gpu.features == rope_cpu.features
            @test rope_gpu.scale == rope_cpu.scale
        end
    else
        # No functional CUDA: record the tests as skipped instead of failing,
        # so CI on CPU-only runners still reports full coverage intent.
        @testset "Mock CUDA Tests" begin
            @test_skip "CUDA GPU is not available"
        end
    end
end

@testset "RoPE Tests" begin
@testset "Gradient Tests (CPU, Float64)" begin
eps = 1e-8
Expand Down Expand Up @@ -121,4 +154,4 @@ end
@test size(output) == (seq_len, features, batch_size)
@test output[1:5, 1:5, 1] expected rtol=1e-5
end
end
end

0 comments on commit fa4940b

Please sign in to comment.