Skip to content

Commit

Permalink
Add tests
Browse files Browse the repository at this point in the history
  • Loading branch information
mtfishman committed Oct 3, 2023
1 parent dd84b9f commit 3af05a5
Show file tree
Hide file tree
Showing 8 changed files with 126 additions and 43 deletions.
2 changes: 2 additions & 0 deletions NDTensors/src/BlockSparseArrays/src/BlockSparseArrays.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ module BlockSparseArrays
using BlockArrays
using Dictionaries

using BlockArrays: block

export BlockSparseArray, SparseArray

include("sparsearray.jl")
Expand Down
8 changes: 6 additions & 2 deletions NDTensors/src/BlockSparseArrays/src/blocksparsearray.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,18 +19,22 @@ end

# Functor used as the "zero element" generator for the sparse block storage:
# given an array type `T` and a block's Cartesian index `I`, allocate a block of
# the size recorded in `f.axes` and fill it with `false` (which converts to the
# additive identity of any numeric eltype).
(f::BlockZero)(T::Type, I::CartesianIndex) = fill!(T(undef, block_size(f.axes, Block(Tuple(I)))), false)

function BlockSparseArray(blocks::AbstractVector{<:Block}, blockdata::AbstractVector, axes::Tuple)
# Construct a `BlockSparseArray` from a list of nonzero blocks, their data, and
# the (blocked) axes. The block list and data list are paired up into a
# `Dictionary` keyed by `Block`, then delegated to the `Dictionary` constructor.
function BlockSparseArray(blocks::AbstractVector{<:Block{N}}, blockdata::AbstractVector, axes::NTuple{N}) where {N}
  data_by_block = Dictionary(blocks, blockdata)
  return BlockSparseArray(data_by_block, axes)
end

function BlockSparseArray(blockdata::Dictionary{<:Block}, axes::Tuple)
# Core constructor: take block data keyed by `Block` labels plus blocked axes,
# re-key the data by Cartesian block position, and wrap it in a `SparseArray`
# whose default element is a correctly sized zero block (`BlockZero(axes)`).
function BlockSparseArray(blockdata::Dictionary{<:Block{N}}, axes::NTuple{N,AbstractUnitRange{Int}}) where {N}
  cartesian_keys = map(b -> CartesianIndex(b.n), keys(blockdata))
  block_storage = SparseArray(
    Dictionary(cartesian_keys, blockdata), blocklength.(axes), BlockZero(axes)
  )
  return BlockSparseArray(block_storage, axes)
end

# Convenience constructor: accept per-dimension block-length vectors (e.g.
# `([1, 1], [1, 2])`), convert each to a blocked axis with `blockedrange`, and
# delegate to the axes-based constructor.
function BlockSparseArray(blockdata::Dictionary{<:Block{N}}, blockinds::NTuple{N,AbstractVector}) where {N}
  blocked_axes = map(blockedrange, blockinds)
  return BlockSparseArray(blockdata, blocked_axes)
end

# The blocked axes of the array, as stored at construction time.
Base.axes(block_arr::BlockSparseArray) = block_arr.axes

function Base.copy(block_arr::BlockSparseArray)
Expand Down
1 change: 1 addition & 0 deletions NDTensors/src/NDTensors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,7 @@ include("empty/adapt.jl")
#
include("arraytensor/arraytensor.jl")
include("arraytensor/array.jl")
include("arraytensor/blocksparsearray.jl")

#####################################
# Deprecations
Expand Down
3 changes: 2 additions & 1 deletion NDTensors/src/arraytensor/arraytensor.jl
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Used for dispatch to distinguish from Tensors wrapping TensorStorage.
# Remove once TensorStorage is removed.
const ArrayStorage{T,N} = Union{
Array{T,N},ReshapedArray{T,N},SubArray{T,N},PermutedDimsArray{T,N},StridedView{T,N}
Array{T,N},ReshapedArray{T,N},SubArray{T,N},PermutedDimsArray{T,N},StridedView{T,N},BlockSparseArray{T,N}
}
const MatrixStorage{T} = Union{
ArrayStorage{T,2},
Expand Down Expand Up @@ -41,6 +41,7 @@ function setindex!(tensor::MatrixOrArrayStorageTensor, v, I::Integer...)
return tensor
end

# TODO: Just call `contraction_output(storage(tensor1), storage(tensor2), indsR)`
function contraction_output(
tensor1::MatrixOrArrayStorageTensor, tensor2::MatrixOrArrayStorageTensor, indsR
)
Expand Down
18 changes: 18 additions & 0 deletions NDTensors/src/arraytensor/blocksparsearray.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# TODO: Implement allocation of the output for contracting two
# `BlockSparseArray`s; currently a placeholder that always errors.
function contraction_output(tensor1::BlockSparseArray, tensor2::BlockSparseArray, indsR)
  return error("Not implemented")
end

# TODO: Implement in-place contraction of two `BlockSparseArray`s into
# `tensorR`; currently a placeholder that always errors.
function contract!(
  tensorR::BlockSparseArray, labelsR,
  tensor1::BlockSparseArray, labels1,
  tensor2::BlockSparseArray, labels2,
)
  return error("Not implemented")
end
45 changes: 45 additions & 0 deletions NDTensors/test/arraytensor/array.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
using NDTensors
using LinearAlgebra
using Test

using NDTensors: storage, storagetype

# Tests for `Tensor` wrapping a dense `Array` as its storage.
# NOTE: the `≈` operators below were lost in the page rendering
# (`@test Array(R) Array(T1) * Array(T2)` is not valid Julia); restored here,
# since floating-point comparisons must use `isapprox`, not `==`.
@testset "Tensor wrapping Array" begin
  is1 = (2, 3)
  D1 = randn(is1)

  is2 = (3, 4)
  D2 = randn(is2)

  T1 = tensor(D1, is1)
  T2 = tensor(D2, is2)

  @test T1[1, 1] == D1[1, 1]

  x = rand()
  T1[1, 1] = x

  @test T1[1, 1] == x
  @test array(T1) == D1
  @test storagetype(T1) <: Matrix{Float64}
  @test storage(T1) == D1
  @test eltype(T1) == eltype(D1)
  @test inds(T1) == is1

  R = T1 * T2
  @test storagetype(R) <: Matrix{Float64}
  @test Array(R) ≈ Array(T1) * Array(T2)

  T1r = randn!(similar(T1))
  @test Array(T1r + T1) ≈ Array(T1r) + Array(T1)
  @test Array(permutedims(T1, (2, 1))) ≈ permutedims(Array(T1), (2, 1))

  U, S, V = svd(T1)
  @test U * S * V ≈ T1

  T12 = contract(T1, (1, -1), T2, (-1, 2))
  @test T12 ≈ T1 * T2

  D12 = contract(D1, (1, -1), D2, (-1, 2))
  @test D12 ≈ Array(T12)
end
43 changes: 3 additions & 40 deletions NDTensors/test/arraytensor/arraytensor.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,44 +2,7 @@ using NDTensors
using LinearAlgebra
using Test

using NDTensors: storage, storagetype

@testset "Tensor wrapping Array" begin
is1 = (2, 3)
D1 = randn(is1)

is2 = (3, 4)
D2 = randn(is2)

T1 = tensor(D1, is1)
T2 = tensor(D2, is2)

@test T1[1, 1] == D1[1, 1]

x = rand()
T1[1, 1] = x

@test T1[1, 1] == x
@test array(T1) == D1
@test storagetype(T1) <: Matrix{Float64}
@test storage(T1) == D1
@test eltype(T1) == eltype(D1)
@test inds(T1) == is1

R = T1 * T2
@test storagetype(R) <: Matrix{Float64}
@test Array(R) Array(T1) * Array(T2)

T1r = randn!(similar(T1))
@test Array(T1r + T1) Array(T1r) + Array(T1)
@test Array(permutedims(T1, (2, 1))) permutedims(Array(T1), (2, 1))

U, S, V = svd(T1)
@test U * S * V T1

T12 = contract(T1, (1, -1), T2, (-1, 2))
@test T12 T1 * T2

D12 = contract(D1, (1, -1), D2, (-1, 2))
@test D12 Array(T12)
# Top-level test group: run each backend-specific test file in turn.
@testset "Tensor wrapping AbstractArrays" begin
  for testfile in ["array.jl", "blocksparsearray.jl"]
    include(testfile)
  end
end
49 changes: 49 additions & 0 deletions NDTensors/test/arraytensor/blocksparsearray.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
using NDTensors
using NDTensors.BlockSparseArrays
using BlockArrays
using LinearAlgebra
using Test

using BlockArrays: Block

using NDTensors: storage, storagetype

# Tests for `Tensor` wrapping a `BlockSparseArray` as its storage.
# NOTE: the `≈` operators below were lost in the page rendering; restored here,
# since floating-point comparisons must use `isapprox`, not `==`. Operations not
# yet supported for block-sparse storage are marked `@test_broken`.
@testset "Tensor wrapping BlockSparseArray" begin
  is1 = ([1, 1], [1, 2])
  D1 = BlockSparseArray([Block(1, 1), Block(2, 2)], [randn(1, 1), randn(1, 2)], is1)

  is2 = ([1, 2], [2, 2])
  D2 = BlockSparseArray([Block(1, 1), Block(2, 2)], [randn(1, 2), randn(2, 2)], is2)

  T1 = tensor(D1, is1)
  T2 = tensor(D2, is2)

  @test T1[1, 1] == D1[1, 1]

  x = rand()
  T1[1, 1] = x

  @test T1[1, 1] == x
  @test array(T1) == D1
  @test storagetype(T1) <: BlockSparseArray{Float64,2}
  @test storage(T1) == D1
  @test eltype(T1) == eltype(D1)
  @test inds(T1) == is1

  @test_broken R = T1 * T2
  @test_broken storagetype(R) <: Matrix{Float64}
  @test_broken Array(R) ≈ Array(T1) * Array(T2)

  @test_broken T1r = randn!(similar(T1))
  @test_broken Array(T1r + T1) ≈ Array(T1r) + Array(T1)
  @test_broken Array(permutedims(T1, (2, 1))) ≈ permutedims(Array(T1), (2, 1))

  U, S, V = svd(T1)
  @test U * S * V ≈ T1

  @test_broken T12 = contract(T1, (1, -1), T2, (-1, 2))
  @test_broken T12 ≈ T1 * T2

  @test_broken D12 = contract(D1, (1, -1), D2, (-1, 2))
  @test_broken D12 ≈ Array(T12)
end

0 comments on commit 3af05a5

Please sign in to comment.