diff --git a/NDTensors/Project.toml b/NDTensors/Project.toml index eb98db55e2..ab7186f807 100644 --- a/NDTensors/Project.toml +++ b/NDTensors/Project.toml @@ -11,7 +11,7 @@ Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" FLoops = "cc61a311-1640-44b5-9fba-1b764f453329" Folds = "41a02a25-b8f0-4f67-bc48-60067656b558" Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196" -GPUArraysCore="46192b85-c4d5-4398-a991-12ede77f4527" +GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" InlineStrings = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" @@ -25,6 +25,7 @@ Strided = "5e0ebb24-38b0-5f93-81fe-25c709ecae67" StridedViews = "4db3bf67-4bd7-4b4e-b153-31dc3fb37143" TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" TupleTools = "9d95972d-f1c8-5527-a6e0-b4b365fa01f6" +VectorInterface = "409d34a3-91d5-4945-b6ec-7529ddf182d8" [weakdeps] CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" diff --git a/NDTensors/src/NDTensors.jl b/NDTensors/src/NDTensors.jl index f9b6dcd154..fd1bbd707d 100644 --- a/NDTensors/src/NDTensors.jl +++ b/NDTensors/src/NDTensors.jl @@ -137,52 +137,6 @@ include("empty/EmptyTensor.jl") include("empty/tensoralgebra/contract.jl") include("empty/adapt.jl") -##################################### -# Array Tensor (experimental) -# - -# TODO: Turn this into a module `CombinerArray`. 
-include("arraystorage/combiner/storage/combinerarray.jl") - -include("arraystorage/arraystorage/storage/arraystorage.jl") -include("arraystorage/arraystorage/storage/conj.jl") -include("arraystorage/arraystorage/storage/permutedims.jl") -include("arraystorage/arraystorage/storage/contract.jl") - -include("arraystorage/arraystorage/tensor/arraystorage.jl") -include("arraystorage/arraystorage/tensor/zeros.jl") -include("arraystorage/arraystorage/tensor/indexing.jl") -include("arraystorage/arraystorage/tensor/permutedims.jl") -include("arraystorage/arraystorage/tensor/mul.jl") -include("arraystorage/arraystorage/tensor/contract.jl") -include("arraystorage/arraystorage/tensor/qr.jl") -include("arraystorage/arraystorage/tensor/eigen.jl") -include("arraystorage/arraystorage/tensor/svd.jl") - -# DiagonalArray storage -include("arraystorage/diagonalarray/storage/contract.jl") - -include("arraystorage/diagonalarray/tensor/contract.jl") - -# BlockSparseArray storage -include("arraystorage/blocksparsearray/storage/unwrap.jl") -include("arraystorage/blocksparsearray/storage/contract.jl") - -include("arraystorage/blocksparsearray/tensor/contract.jl") - -# Combiner storage -include("arraystorage/combiner/storage/promote_rule.jl") -include("arraystorage/combiner/storage/contract_utils.jl") -include("arraystorage/combiner/storage/contract.jl") - -include("arraystorage/combiner/tensor/to_arraystorage.jl") -include("arraystorage/combiner/tensor/contract.jl") - -include("arraystorage/blocksparsearray/storage/combiner/contract.jl") -include("arraystorage/blocksparsearray/storage/combiner/contract_utils.jl") -include("arraystorage/blocksparsearray/storage/combiner/contract_combine.jl") -include("arraystorage/blocksparsearray/storage/combiner/contract_uncombine.jl") - ##################################### # Deprecations # diff --git a/NDTensors/src/arraystorage/arraystorage/storage/arraystorage.jl b/NDTensors/src/backup/arraystorage/arraystorage/storage/arraystorage.jl similarity index 
100% rename from NDTensors/src/arraystorage/arraystorage/storage/arraystorage.jl rename to NDTensors/src/backup/arraystorage/arraystorage/storage/arraystorage.jl diff --git a/NDTensors/src/arraystorage/arraystorage/storage/conj.jl b/NDTensors/src/backup/arraystorage/arraystorage/storage/conj.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/storage/conj.jl rename to NDTensors/src/backup/arraystorage/arraystorage/storage/conj.jl diff --git a/NDTensors/src/arraystorage/arraystorage/storage/contract.jl b/NDTensors/src/backup/arraystorage/arraystorage/storage/contract.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/storage/contract.jl rename to NDTensors/src/backup/arraystorage/arraystorage/storage/contract.jl diff --git a/NDTensors/src/arraystorage/arraystorage/storage/permutedims.jl b/NDTensors/src/backup/arraystorage/arraystorage/storage/permutedims.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/storage/permutedims.jl rename to NDTensors/src/backup/arraystorage/arraystorage/storage/permutedims.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/arraystorage.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/arraystorage.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/arraystorage.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/arraystorage.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/contract.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/contract.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/contract.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/contract.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/eigen.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/eigen.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/eigen.jl rename to 
NDTensors/src/backup/arraystorage/arraystorage/tensor/eigen.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/eigen_generic.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/eigen_generic.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/eigen_generic.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/eigen_generic.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/indexing.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/indexing.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/indexing.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/indexing.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/mul.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/mul.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/mul.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/mul.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/permutedims.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/permutedims.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/permutedims.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/permutedims.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/qr.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/qr.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/qr.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/qr.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/svd.jl b/NDTensors/src/backup/arraystorage/arraystorage/tensor/svd.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/svd.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/svd.jl diff --git a/NDTensors/src/arraystorage/arraystorage/tensor/zeros.jl 
b/NDTensors/src/backup/arraystorage/arraystorage/tensor/zeros.jl similarity index 100% rename from NDTensors/src/arraystorage/arraystorage/tensor/zeros.jl rename to NDTensors/src/backup/arraystorage/arraystorage/tensor/zeros.jl diff --git a/NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract.jl b/NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract.jl similarity index 100% rename from NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract.jl rename to NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract.jl diff --git a/NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract_combine.jl b/NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract_combine.jl similarity index 100% rename from NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract_combine.jl rename to NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract_combine.jl diff --git a/NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract_uncombine.jl b/NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract_uncombine.jl similarity index 100% rename from NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract_uncombine.jl rename to NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract_uncombine.jl diff --git a/NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract_utils.jl b/NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract_utils.jl similarity index 100% rename from NDTensors/src/arraystorage/blocksparsearray/storage/combiner/contract_utils.jl rename to NDTensors/src/backup/arraystorage/blocksparsearray/storage/combiner/contract_utils.jl diff --git a/NDTensors/src/arraystorage/blocksparsearray/storage/contract.jl b/NDTensors/src/backup/arraystorage/blocksparsearray/storage/contract.jl similarity index 94% rename from 
NDTensors/src/arraystorage/blocksparsearray/storage/contract.jl rename to NDTensors/src/backup/arraystorage/blocksparsearray/storage/contract.jl index 7007f45d22..b733c37706 100644 --- a/NDTensors/src/arraystorage/blocksparsearray/storage/contract.jl +++ b/NDTensors/src/backup/arraystorage/blocksparsearray/storage/contract.jl @@ -1,6 +1,5 @@ -# TODO: Change to: -# using .SparseArrayDOKs: SparseArrayDOK -using .BlockSparseArrays: SparseArray +# TODO: Define in `SparseArrayInterface`. +using ..SparseArrayDOKs: SparseArrayDOK # TODO: This is inefficient, need to optimize. # Look at `contract_labels`, `contract_blocks` and `maybe_contract_blocks!` in: @@ -39,11 +38,11 @@ function default_contract_muladd(a1, labels1, a2, labels2, a_dest, labels_dest) end function contract!( - a_dest::SparseArray, + a_dest::SparseArrayDOK, labels_dest, - a1::SparseArray, + a1::SparseArrayDOK, labels1, - a2::SparseArray, + a2::SparseArrayDOK, labels2; muladd=default_contract_muladd, ) diff --git a/NDTensors/src/arraystorage/blocksparsearray/storage/unwrap.jl b/NDTensors/src/backup/arraystorage/blocksparsearray/storage/unwrap.jl similarity index 100% rename from NDTensors/src/arraystorage/blocksparsearray/storage/unwrap.jl rename to NDTensors/src/backup/arraystorage/blocksparsearray/storage/unwrap.jl diff --git a/NDTensors/src/arraystorage/blocksparsearray/tensor/contract.jl b/NDTensors/src/backup/arraystorage/blocksparsearray/tensor/contract.jl similarity index 100% rename from NDTensors/src/arraystorage/blocksparsearray/tensor/contract.jl rename to NDTensors/src/backup/arraystorage/blocksparsearray/tensor/contract.jl diff --git a/NDTensors/src/arraystorage/combiner/storage/combinerarray.jl b/NDTensors/src/backup/arraystorage/combiner/storage/combinerarray.jl similarity index 100% rename from NDTensors/src/arraystorage/combiner/storage/combinerarray.jl rename to NDTensors/src/backup/arraystorage/combiner/storage/combinerarray.jl diff --git 
a/NDTensors/src/arraystorage/combiner/storage/contract.jl b/NDTensors/src/backup/arraystorage/combiner/storage/contract.jl similarity index 100% rename from NDTensors/src/arraystorage/combiner/storage/contract.jl rename to NDTensors/src/backup/arraystorage/combiner/storage/contract.jl diff --git a/NDTensors/src/arraystorage/combiner/storage/contract_utils.jl b/NDTensors/src/backup/arraystorage/combiner/storage/contract_utils.jl similarity index 100% rename from NDTensors/src/arraystorage/combiner/storage/contract_utils.jl rename to NDTensors/src/backup/arraystorage/combiner/storage/contract_utils.jl diff --git a/NDTensors/src/arraystorage/combiner/storage/promote_rule.jl b/NDTensors/src/backup/arraystorage/combiner/storage/promote_rule.jl similarity index 100% rename from NDTensors/src/arraystorage/combiner/storage/promote_rule.jl rename to NDTensors/src/backup/arraystorage/combiner/storage/promote_rule.jl diff --git a/NDTensors/src/arraystorage/combiner/tensor/contract.jl b/NDTensors/src/backup/arraystorage/combiner/tensor/contract.jl similarity index 100% rename from NDTensors/src/arraystorage/combiner/tensor/contract.jl rename to NDTensors/src/backup/arraystorage/combiner/tensor/contract.jl diff --git a/NDTensors/src/arraystorage/combiner/tensor/to_arraystorage.jl b/NDTensors/src/backup/arraystorage/combiner/tensor/to_arraystorage.jl similarity index 100% rename from NDTensors/src/arraystorage/combiner/tensor/to_arraystorage.jl rename to NDTensors/src/backup/arraystorage/combiner/tensor/to_arraystorage.jl diff --git a/NDTensors/src/arraystorage/diagonalarray/storage/contract.jl b/NDTensors/src/backup/arraystorage/diagonalarray/storage/contract.jl similarity index 100% rename from NDTensors/src/arraystorage/diagonalarray/storage/contract.jl rename to NDTensors/src/backup/arraystorage/diagonalarray/storage/contract.jl diff --git a/NDTensors/src/arraystorage/diagonalarray/tensor/contract.jl b/NDTensors/src/backup/arraystorage/diagonalarray/tensor/contract.jl 
similarity index 100% rename from NDTensors/src/arraystorage/diagonalarray/tensor/contract.jl rename to NDTensors/src/backup/arraystorage/diagonalarray/tensor/contract.jl diff --git a/NDTensors/src/diag/diagtensor.jl b/NDTensors/src/diag/diagtensor.jl index 5dbbad661b..ba0a93a920 100644 --- a/NDTensors/src/diag/diagtensor.jl +++ b/NDTensors/src/diag/diagtensor.jl @@ -1,3 +1,5 @@ +using .DiagonalArrays: diaglength + const DiagTensor{ElT,N,StoreT,IndsT} = Tensor{ElT,N,StoreT,IndsT} where {StoreT<:Diag} const NonuniformDiagTensor{ElT,N,StoreT,IndsT} = Tensor{ElT,N,StoreT,IndsT} where {StoreT<:NonuniformDiag} diff --git a/NDTensors/src/lib/BlockSparseArrays/examples/Project.toml b/NDTensors/src/lib/BlockSparseArrays/examples/Project.toml new file mode 100644 index 0000000000..d1bf575ce0 --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/examples/Project.toml @@ -0,0 +1,4 @@ +[deps] +BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e" +NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/NDTensors/src/lib/BlockSparseArrays/examples/README.jl b/NDTensors/src/lib/BlockSparseArrays/examples/README.jl index eb0ea1030f..f0a9edd4bb 100644 --- a/NDTensors/src/lib/BlockSparseArrays/examples/README.jl +++ b/NDTensors/src/lib/BlockSparseArrays/examples/README.jl @@ -6,11 +6,13 @@ # to store non-zero values, specifically a `Dictionary` from `Dictionaries.jl`. # `BlockArrays` reinterprets the `SparseArray` as a blocked data structure. 
-using NDTensors.BlockSparseArrays -using BlockArrays: BlockArrays, blockedrange -using Test +using BlockArrays: BlockArrays, PseudoBlockVector, blockedrange +using NDTensors.BlockSparseArrays: BlockSparseArray, block_nstored +using Test: @test, @test_broken function main() + Block = BlockArrays.Block + ## Block dimensions i1 = [2, 3] i2 = [2, 3] @@ -22,39 +24,54 @@ function main() end ## Data - nz_blocks = BlockArrays.Block.([(1, 1), (2, 2)]) + nz_blocks = Block.([(1, 1), (2, 2)]) nz_block_sizes = [block_size(i_axes, nz_block) for nz_block in nz_blocks] nz_block_lengths = prod.(nz_block_sizes) + ## Blocks with contiguous underlying data + d_data = PseudoBlockVector(randn(sum(nz_block_lengths)), nz_block_lengths) + d_blocks = [ + reshape(@view(d_data[Block(i)]), block_size(i_axes, nz_blocks[i])) for + i in 1:length(nz_blocks) + ] + b = BlockSparseArray(nz_blocks, d_blocks, i_axes) + + @test block_nstored(b) == 2 + ## Blocks with discontiguous underlying data d_blocks = randn.(nz_block_sizes) + b = BlockSparseArray(nz_blocks, d_blocks, i_axes) - ## Blocks with contiguous underlying data - ## d_data = PseudoBlockVector(randn(sum(nz_block_lengths)), nz_block_lengths) - ## d_blocks = [reshape(@view(d_data[Block(i)]), block_size(i_axes, nz_blocks[i])) for i in 1:length(nz_blocks)] - - B = BlockSparseArray(nz_blocks, d_blocks, i_axes) + @test block_nstored(b) == 2 ## Access a block - B[BlockArrays.Block(1, 1)] + @test b[Block(1, 1)] == d_blocks[1] - ## Access a non-zero block, returns a zero matrix - B[BlockArrays.Block(1, 2)] + ## Access a zero block, returns a zero matrix + @test b[Block(1, 2)] == zeros(2, 3) ## Set a zero block - B[BlockArrays.Block(1, 2)] = randn(2, 3) + a₁₂ = randn(2, 3) + b[Block(1, 2)] = a₁₂ + @test b[Block(1, 2)] == a₁₂ ## Matrix multiplication (not optimized for sparsity yet) - @test B * B ≈ Array(B) * Array(B) + @test b * b ≈ Array(b) * Array(b) - permuted_B = permutedims(B, (2, 1)) - @test permuted_B isa BlockSparseArray - @test permuted_B == 
permutedims(Array(B), (2, 1)) + permuted_b = permutedims(b, (2, 1)) + ## TODO: Fix this, broken. + @test_broken permuted_b isa BlockSparseArray + @test permuted_b == permutedims(Array(b), (2, 1)) - @test B + B ≈ Array(B) + Array(B) - @test 2B ≈ 2Array(B) + @test b + b ≈ Array(b) + Array(b) - @test reshape(B, ([4, 6, 6, 9],)) isa BlockSparseArray{<:Any,1} + scaled_b = 2b + @test scaled_b ≈ 2Array(b) + ## TODO: Fix this, broken. + @test_broken scaled_b isa BlockSparseArray + + ## TODO: Fix this, broken. + @test_broken reshape(b, ([4, 6, 6, 9],)) isa BlockSparseArray{<:Any,1} return nothing end @@ -63,8 +80,8 @@ main() # # BlockSparseArrays.jl and BlockArrays.jl interface -using NDTensors.BlockSparseArrays using BlockArrays: BlockArrays +using NDTensors.BlockSparseArrays: BlockSparseArray i1 = [2, 3] i2 = [2, 3] diff --git a/NDTensors/src/lib/BlockSparseArrays/src/BlockArraysExtensions/BlockArraysExtensions.jl b/NDTensors/src/lib/BlockSparseArrays/src/BlockArraysExtensions/BlockArraysExtensions.jl new file mode 100644 index 0000000000..6e84b0dd22 --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/src/BlockArraysExtensions/BlockArraysExtensions.jl @@ -0,0 +1,14 @@ +using BlockArrays: Block +using Dictionaries: Dictionary, Indices + +# TODO: Use `Tuple` conversion once +# BlockArrays.jl PR is merged. 
+block_to_cartesianindex(b::Block) = CartesianIndex(b.n) + +function blocks_to_cartesianindices(i::Indices{<:Block}) + return block_to_cartesianindex.(i) +end + +function blocks_to_cartesianindices(d::Dictionary{<:Block}) + return Dictionary(blocks_to_cartesianindices(eachindex(d)), d) +end diff --git a/NDTensors/src/lib/BlockSparseArrays/src/BlockSparseArrays.jl b/NDTensors/src/lib/BlockSparseArrays/src/BlockSparseArrays.jl index bda180c68d..b0a15209eb 100644 --- a/NDTensors/src/lib/BlockSparseArrays/src/BlockSparseArrays.jl +++ b/NDTensors/src/lib/BlockSparseArrays/src/BlockSparseArrays.jl @@ -1,44 +1,8 @@ module BlockSparseArrays -using ..AlgorithmSelection: Algorithm, @Algorithm_str -using BlockArrays: - AbstractBlockArray, - BlockArrays, - BlockVector, - Block, - BlockIndex, - BlockRange, - BlockedUnitRange, - findblockindex, - block, - blockaxes, - blockcheckbounds, - blockfirsts, - blocklasts, - blocklength, - blocklengths, - blockedrange, - blocks -using Compat: Returns, allequal -using Dictionaries: Dictionary, Indices, getindices, set! # TODO: Move to `SparseArraysExtensions`. -using LinearAlgebra: Hermitian -using SplitApplyCombine: groupcount - -export BlockSparseArray, SparseArray - -include("tensor_product.jl") -include("base.jl") -include("axes.jl") -include("abstractarray.jl") -include("permuteddimsarray.jl") -include("blockarrays.jl") -# TODO: Split off into `SparseArraysExtensions` module, rename to `SparseArrayDOK`. 
-include("sparsearray.jl") -include("blocksparsearray.jl") -include("allocate_output.jl") -include("subarray.jl") -include("broadcast.jl") -include("fusedims.jl") -include("gradedrange.jl") -include("LinearAlgebraExt/LinearAlgebraExt.jl") - +include("blocksparsearrayinterface/blocksparsearrayinterface.jl") +include("blocksparsearrayinterface/blockzero.jl") +include("abstractblocksparsearray/abstractblocksparsearray.jl") +include("blocksparsearray/defaults.jl") +include("blocksparsearray/blocksparsearray.jl") +include("BlockArraysExtensions/BlockArraysExtensions.jl") end diff --git a/NDTensors/src/lib/BlockSparseArrays/src/abstractblocksparsearray/abstractblocksparsearray.jl b/NDTensors/src/lib/BlockSparseArrays/src/abstractblocksparsearray/abstractblocksparsearray.jl new file mode 100644 index 0000000000..f63531179c --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/src/abstractblocksparsearray/abstractblocksparsearray.jl @@ -0,0 +1,46 @@ +using BlockArrays: BlockArrays, AbstractBlockArray, Block, BlockIndex + +# TODO: Delete this. This function was replaced +# by `nstored` but is still used in `NDTensors`. +function nonzero_keys end + +abstract type AbstractBlockSparseArray{T,N} <: AbstractBlockArray{T,N} end + +# Base `AbstractArray` interface +Base.axes(::AbstractBlockSparseArray) = error("Not implemented") + +# BlockArrays `AbstractBlockArray` interface +BlockArrays.blocks(::AbstractBlockSparseArray) = error("Not implemented") + +blocktype(a::AbstractBlockSparseArray) = eltype(blocks(a)) + +# Base `AbstractArray` interface +function Base.getindex(a::AbstractBlockSparseArray{<:Any,N}, I::Vararg{Int,N}) where {N} + return blocksparse_getindex(a, I...) +end + +function Base.setindex!( + a::AbstractBlockSparseArray{<:Any,N}, value, I::Vararg{Int,N} +) where {N} + blocksparse_setindex!(a, value, I...) 
+ return a +end + +function Base.setindex!( + a::AbstractBlockSparseArray{<:Any,N}, value, I::BlockIndex{N} +) where {N} + blocksparse_setindex!(a, value, I) + return a +end + +function Base.setindex!(a::AbstractBlockSparseArray{<:Any,N}, value, I::Block{N}) where {N} + blocksparse_setindex!(a, value, I) + return a +end + +# `BlockArrays` interface +function BlockArrays.viewblock( + a::AbstractBlockSparseArray{<:Any,N}, I::Block{N,Int} +) where {N} + return blocksparse_viewblock(a, I) +end diff --git a/NDTensors/src/lib/BlockSparseArrays/src/backup/BlockSparseArrays.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/BlockSparseArrays.jl new file mode 100644 index 0000000000..05021690a9 --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/src/backup/BlockSparseArrays.jl @@ -0,0 +1,45 @@ +module BlockSparseArrays +using ..AlgorithmSelection: Algorithm, @Algorithm_str +using BlockArrays: + AbstractBlockArray, + BlockArrays, + BlockVector, + Block, + BlockIndex, + BlockRange, + BlockedUnitRange, + findblockindex, + block, + blockaxes, + blockcheckbounds, + blockfirsts, + blocklasts, + blocklength, + blocklengths, + blockedrange, + blocks +using Compat: Returns, allequal +using Dictionaries: Dictionary, Indices, getindices, set! # TODO: Move to `SparseArraysExtensions`. +using LinearAlgebra: Hermitian +using SplitApplyCombine: groupcount + +export BlockSparseArray # , SparseArray + +include("defaults.jl") +include("tensor_product.jl") +include("base.jl") +include("axes.jl") +include("abstractarray.jl") +include("permuteddimsarray.jl") +include("blockarrays.jl") +# TODO: Split off into `SparseArraysExtensions` module, rename to `SparseArrayDOK`. 
+# include("sparsearray.jl") +include("blocksparsearray.jl") +include("allocate_output.jl") +include("subarray.jl") +include("broadcast.jl") +include("fusedims.jl") +include("gradedrange.jl") +include("LinearAlgebraExt/LinearAlgebraExt.jl") + +end diff --git a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/LinearAlgebraExt.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/LinearAlgebraExt.jl similarity index 81% rename from NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/LinearAlgebraExt.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/LinearAlgebraExt.jl index 912fbab79b..79dac54d9c 100644 --- a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/LinearAlgebraExt.jl +++ b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/LinearAlgebraExt.jl @@ -1,7 +1,7 @@ module LinearAlgebraExt using ...AlgorithmSelection: Algorithm, @Algorithm_str using BlockArrays: BlockArrays, blockedrange, blocks -using ..BlockSparseArrays: SparseArray, nonzero_keys # TODO: Move to `SparseArraysExtensions` module, rename `SparseArrayDOK`. +using ..BlockSparseArrays: nonzero_keys # TODO: Move to `SparseArraysExtensions` module, rename `SparseArrayDOK`. 
using ..BlockSparseArrays: BlockSparseArrays, BlockSparseArray, nonzero_blockkeys using LinearAlgebra: LinearAlgebra, Hermitian, Transpose, I, eigen, qr using SparseArrays: SparseArrays, SparseMatrixCSC, spzeros, sparse diff --git a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/eigen.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/eigen.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/eigen.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/eigen.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/hermitian.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/hermitian.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/hermitian.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/hermitian.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/qr.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/qr.jl similarity index 93% rename from NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/qr.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/qr.jl index f0a14ed1d2..aac889c446 100644 --- a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/qr.jl +++ b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/qr.jl @@ -1,3 +1,5 @@ +using ...SparseArrayDOKs: SparseArrayDOK + # Check if the matrix has 1 or fewer entries # per row/column. function is_permutation_matrix(a::SparseMatrixCSC) @@ -6,7 +8,7 @@ end # Check if the matrix has 1 or fewer entries # per row/column. 
-function is_permutation_matrix(a::SparseArray{<:Any,2}) +function is_permutation_matrix(a::SparseArrayDOK{<:Any,2}) keys = collect(Iterators.map(Tuple, nonzero_keys(a))) I = first.(keys) J = last.(keys) @@ -17,7 +19,8 @@ function findnonzerorows(a::SparseMatrixCSC, col) return view(a.rowval, a.colptr[col]:(a.colptr[col + 1] - 1)) end -function SparseArrays.SparseMatrixCSC(a::SparseArray{<:Any,2}) +# TODO: Is this already defined? +function SparseArrays.SparseMatrixCSC(a::SparseArrayDOK{<:Any,2}) # Not defined: # a_csc = SparseMatrixCSC{eltype(a)}(size(a)) a_csc = spzeros(eltype(a), size(a)) @@ -27,8 +30,11 @@ function SparseArrays.SparseMatrixCSC(a::SparseArray{<:Any,2}) return a_csc end +# TODO: Is this already defined? # Get the sparse structure of a SparseArray as a SparseMatrixCSC. -function sparse_structure(structure_type::Type{<:SparseMatrixCSC}, a::SparseArray{<:Any,2}) +function sparse_structure( + structure_type::Type{<:SparseMatrixCSC}, a::SparseArrayDOK{<:Any,2} +) # Idealy would work but a bit too complicated for `map` right now: # return SparseMatrixCSC(map(x -> iszero(x) ? false : true, a)) # TODO: Change to `spzeros(Bool, size(a))`. 
diff --git a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/svd.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/svd.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/svd.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/svd.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/transpose.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/transpose.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/LinearAlgebraExt/transpose.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/LinearAlgebraExt/transpose.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/abstractarray.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/abstractarray.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/abstractarray.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/abstractarray.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/allocate_output.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/allocate_output.jl similarity index 93% rename from NDTensors/src/lib/BlockSparseArrays/src/allocate_output.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/allocate_output.jl index 7591f8b126..2e525c6f39 100644 --- a/NDTensors/src/lib/BlockSparseArrays/src/allocate_output.jl +++ b/NDTensors/src/lib/BlockSparseArrays/src/backup/allocate_output.jl @@ -89,6 +89,12 @@ end # SparseArray ############################################################################# +using ..SparseArrayDOKs: SparseArrayDOK + +const SparseArrayLike{T,N,Zero} = Union{ + SparseArrayDOK{T,N,Zero},PermutedDimsArray{T,N,<:Any,<:Any,SparseArrayDOK{T,N,Zero}} +} + # TODO: Maybe store nonzero locations? # TODO: Make this backwards compatible. # TODO: Add a default for `eltype` and `zero`. 
@@ -100,7 +106,7 @@ Base.@kwdef struct SparseArrayStructure{ElType,Axes,Zero} <: zero::Zero end -function allocate(arraytype::Type{<:SparseArray}, structure::SparseArrayStructure) +function allocate(arraytype::Type{<:SparseArrayDOK}, structure::SparseArrayStructure) # TODO: Use `set_eltype`. return arraytype{structure.eltype}(structure.axes, structure.zero) end diff --git a/NDTensors/src/lib/BlockSparseArrays/src/axes.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/axes.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/axes.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/axes.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/base.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/base.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/base.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/base.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/blockarrays.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/blockarrays.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/blockarrays.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/blockarrays.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/blocksparsearray.jl similarity index 94% rename from NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/blocksparsearray.jl index c76af00be9..23e43e0fc5 100644 --- a/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray.jl +++ b/NDTensors/src/lib/BlockSparseArrays/src/backup/blocksparsearray.jl @@ -1,9 +1,14 @@ using BlockArrays: block +## using ..SparseArrayDOKs: SparseArrayDOK # Also add a version with contiguous underlying data. 
struct BlockSparseArray{ - T,N,A<:AbstractArray{T,N},Blocks<:SparseArray{A,N},Axes<:NTuple{N,AbstractUnitRange{Int}} -} <: AbstractBlockArray{T,N} + T, + N, + A<:AbstractArray{T,N}, + Blocks<:AbstractArray{A,N}, + Axes<:NTuple{N,AbstractUnitRange{Int}}, +} <: AbstractBlockSparseArray{T,N} blocks::Blocks axes::Axes end @@ -26,7 +31,7 @@ function Base.reshape(a::BlockSparseArray, ax::Tuple{Vararg{AbstractUnitRange}}) ## blocks_reshaped = reshape(blocks(a), blocklength.(ax)) blocktype_reshaped = set_ndims(blocktype(a), length(ax)) # TODO: Some other way of getting `zero` function? - blocks_reshaped = SparseArray( + blocks_reshaped = default_sparsearray_type()( Dictionary{CartesianIndex{length(ax)},blocktype_reshaped}(), blocklength.(ax), BlockZero(ax), @@ -60,6 +65,10 @@ struct BlockZero{Axes} axes::Axes end +function (f::BlockZero)(a::AbstractArray, I::CartesianIndex) + return f(eltype(a), I) +end + function (f::BlockZero)( arraytype::Type{<:AbstractArray{<:Any,N}}, I::CartesianIndex{N} ) where {N} @@ -113,7 +122,7 @@ end ## axes::Axes ## end -function BlockSparseArray(a::SparseArray, axes::Tuple{Vararg{AbstractUnitRange}}) +function BlockSparseArray(a::AbstractArray, axes::Tuple{Vararg{AbstractUnitRange}}) A = eltype(a) T = eltype(A) N = ndims(a) @@ -132,7 +141,9 @@ function BlockSparseArray( map(block -> CartesianIndex(inttuple(block)), blocks) end cartesiandata = Dictionary(cartesianblocks, blockdata) - block_storage = SparseArray(cartesiandata, blocklength.(axes), BlockZero(axes)) + block_storage = default_sparsearray_type()( + cartesiandata, blocklength.(axes), BlockZero(axes) + ) return BlockSparseArray(block_storage, axes) end @@ -269,6 +280,8 @@ function Base.permutedims!(a_src::BlockSparseArray, a_dest::BlockSparseArray, pe return a_src end +# TODO: Call `map(b -> permutedims(b, perm), blocks(a))` +# or something like that. 
function Base.permutedims(a::BlockSparseArray, perm) a_dest = zero(PermutedDimsArray(a, perm)) permutedims!(a_dest, a, perm) diff --git a/NDTensors/src/lib/BlockSparseArrays/src/broadcast.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/broadcast.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/broadcast.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/broadcast.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/backup/defaults.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/defaults.jl new file mode 100644 index 0000000000..9eb302e64d --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/src/backup/defaults.jl @@ -0,0 +1,3 @@ +using ..SparseArrayDOKs: SparseArrayDOK + +default_sparsearray_type() = SparseArrayDOK diff --git a/NDTensors/src/lib/BlockSparseArrays/src/fusedims.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/fusedims.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/fusedims.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/fusedims.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/gradedrange.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/gradedrange.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/gradedrange.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/gradedrange.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/permuteddimsarray.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/permuteddimsarray.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/permuteddimsarray.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/permuteddimsarray.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/sparsearray.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/sparsearray.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/sparsearray.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/sparsearray.jl diff --git 
a/NDTensors/src/lib/BlockSparseArrays/src/subarray.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/subarray.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/subarray.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/subarray.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/tensor_product.jl b/NDTensors/src/lib/BlockSparseArrays/src/backup/tensor_product.jl similarity index 100% rename from NDTensors/src/lib/BlockSparseArrays/src/tensor_product.jl rename to NDTensors/src/lib/BlockSparseArrays/src/backup/tensor_product.jl diff --git a/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray/blocksparsearray.jl b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray/blocksparsearray.jl new file mode 100644 index 0000000000..b5912ed940 --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray/blocksparsearray.jl @@ -0,0 +1,70 @@ +using BlockArrays: BlockArrays, Block, BlockedUnitRange, blockedrange, blocklength +using Dictionaries: Dictionary +using ..SparseArrayDOKs: SparseArrayDOK + +# TODO: Delete this. 
+## using BlockArrays: blocks + +struct BlockSparseArray{ + T, + N, + A<:AbstractArray{T,N}, + Blocks<:AbstractArray{A,N}, + Axes<:NTuple{N,AbstractUnitRange{Int}}, +} <: AbstractBlockSparseArray{T,N} + blocks::Blocks + axes::Axes +end + +function BlockSparseArray( + block_data::Dictionary{<:Block{N},<:AbstractArray{<:Any,N}}, + axes::Tuple{Vararg{AbstractUnitRange,N}}, +) where {N} + blocks = default_blocks(block_data, axes) + return BlockSparseArray(blocks, axes) +end + +function BlockSparseArray( + block_indices::Vector{<:Block{N}}, + block_data::Vector{<:AbstractArray{<:Any,N}}, + axes::Tuple{Vararg{AbstractUnitRange,N}}, +) where {N} + return BlockSparseArray(Dictionary(block_indices, block_data), axes) +end + +function BlockSparseArray{T,N}( + blocks::AbstractArray{<:AbstractArray{T,N},N}, axes::Tuple{Vararg{AbstractUnitRange,N}} +) where {T,N} + return BlockSparseArray{T,N,eltype(blocks),typeof(blocks),typeof(axes)}(blocks, axes) +end + +function BlockSparseArray{T,N}( + block_data::Dictionary{Block{N,Int},<:AbstractArray{T,N}}, + axes::Tuple{Vararg{AbstractUnitRange,N}}, +) where {T,N} + blocks = default_blocks(block_data, axes) + return BlockSparseArray{T,N}(blocks, axes) +end + +function BlockSparseArray{T,N}(axes::Tuple{Vararg{AbstractUnitRange,N}}) where {T,N} + blocks = default_blocks(T, axes) + return BlockSparseArray{T,N}(blocks, axes) +end + +function BlockSparseArray{T,N}(dims::Tuple{Vararg{Vector{Int},N}}) where {T,N} + return BlockSparseArray{T,N}(blockedrange.(dims)) +end + +function BlockSparseArray{T}(dims::Tuple{Vararg{Vector{Int}}}) where {T} + return BlockSparseArray{T,length(dims)}(dims) +end + +function BlockSparseArray{T}(dims::Vararg{Vector{Int}}) where {T} + return BlockSparseArray{T}(dims) +end + +# Base `AbstractArray` interface +Base.axes(a::BlockSparseArray) = a.axes + +# BlockArrays `AbstractBlockArray` interface +BlockArrays.blocks(a::BlockSparseArray) = a.blocks diff --git 
a/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray/defaults.jl b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray/defaults.jl new file mode 100644 index 0000000000..c4c380830f --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearray/defaults.jl @@ -0,0 +1,42 @@ +using BlockArrays: Block +using Dictionaries: Dictionary +using ..SparseArrayDOKs: SparseArrayDOK + +# Construct the sparse structure storing the blocks +function default_blockdata( + block_data::Dictionary{<:CartesianIndex{N},<:AbstractArray{<:Any,N}}, + axes::Tuple{Vararg{AbstractUnitRange,N}}, +) where {N} + return error() +end + +function default_blocks( + block_indices::Vector{<:Block{N}}, + block_data::Vector{<:AbstractArray{<:Any,N}}, + axes::Tuple{Vararg{AbstractUnitRange,N}}, +) where {N} + return default_blocks(Dictionary(block_indices, block_data), axes) +end + +function default_blocks( + block_data::Dictionary{<:Block{N},<:AbstractArray{<:Any,N}}, + axes::Tuple{Vararg{AbstractUnitRange,N}}, +) where {N} + return default_blocks(blocks_to_cartesianindices(block_data), axes) +end + +function default_arraytype(elt::Type, axes::Tuple{Vararg{AbstractUnitRange}}) + return Array{elt,length(axes)} +end + +function default_blocks(elt::Type, axes::Tuple{Vararg{AbstractUnitRange}}) + block_data = Dictionary{Block{length(axes),Int},default_arraytype(elt, axes)}() + return default_blocks(block_data, axes) +end + +function default_blocks( + block_data::Dictionary{<:CartesianIndex{N},<:AbstractArray{<:Any,N}}, + axes::Tuple{Vararg{AbstractUnitRange,N}}, +) where {N} + return SparseArrayDOK(block_data, blocklength.(axes), BlockZero(axes)) +end diff --git a/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearrayinterface/blocksparsearrayinterface.jl b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearrayinterface/blocksparsearrayinterface.jl new file mode 100644 index 0000000000..3d8b5b4939 --- /dev/null +++ 
b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearrayinterface/blocksparsearrayinterface.jl @@ -0,0 +1,40 @@ +using BlockArrays: Block, BlockIndex, block, blocks, blockcheckbounds, findblockindex +using ..SparseArrayInterface: nstored + +function blocksparse_getindex(a::AbstractArray{<:Any,N}, I::Vararg{Int,N}) where {N} + @boundscheck checkbounds(a, I...) + return a[findblockindex.(axes(a), I)...] +end + +function blocksparse_setindex!(a::AbstractArray{<:Any,N}, value, I::Vararg{Int,N}) where {N} + @boundscheck checkbounds(a, I...) + a[findblockindex.(axes(a), I)...] = value + return a +end + +function blocksparse_setindex!(a::AbstractArray{<:Any,N}, value, I::BlockIndex{N}) where {N} + a_b = view(a, block(I)) + a_b[I.α...] = value + # Set the block, required if it is structurally zero + a[block(I)] = a_b + return a +end + +function blocksparse_setindex!(a::AbstractArray{<:Any,N}, value, I::Block{N}) where {N} + # TODO: Create a conversion function, say `CartesianIndex(Int.(Tuple(I)))`. + i = I.n + @boundscheck blockcheckbounds(a, i...) + blocks(a)[i...] = value + return a +end + +function blocksparse_viewblock(a::AbstractArray{<:Any,N}, I::Block{N}) where {N} + # TODO: Create a conversion function, say `CartesianIndex(Int.(Tuple(I)))`. + i = I.n + @boundscheck blockcheckbounds(a, i...) + return blocks(a)[i...] 
+end + +function block_nstored(a::AbstractArray) + return nstored(blocks(a)) +end diff --git a/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearrayinterface/blockzero.jl b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearrayinterface/blockzero.jl new file mode 100644 index 0000000000..707a399240 --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/src/blocksparsearrayinterface/blockzero.jl @@ -0,0 +1,43 @@ +using BlockArrays: Block, blockedrange + +# Extensions to BlockArrays.jl +blocktuple(b::Block) = Block.(b.n) +inttuple(b::Block) = b.n + +# The size of a block +function block_size(axes::Tuple{Vararg{AbstractUnitRange}}, block::Block) + return length.(getindex.(axes, blocktuple(block))) +end + +# The size of a block +function block_size(blockinds::Tuple{Vararg{AbstractVector}}, block::Block) + return block_size(blockedrange.(blockinds), block) +end + +struct BlockZero{Axes} + axes::Axes +end + +function (f::BlockZero)(a::AbstractArray, I::CartesianIndex) + return f(eltype(a), I) +end + +function (f::BlockZero)( + arraytype::Type{<:AbstractArray{<:Any,N}}, I::CartesianIndex{N} +) where {N} + # TODO: Make sure this works for sparse or block sparse blocks, immutable + # blocks, diagonal blocks, etc.! + return fill!(arraytype(undef, block_size(f.axes, Block(Tuple(I)))), false) +end + +# Fallback so that `SparseArray` with scalar elements works. 
+function (f::BlockZero)(blocktype::Type{<:Number}, I::CartesianIndex) + return zero(blocktype) +end + +# Fallback to Array if it is abstract +function (f::BlockZero)( + arraytype::Type{AbstractArray{T,N}}, I::CartesianIndex{N} +) where {T,N} + return fill!(Array{T,N}(undef, block_size(f.axes, Block(Tuple(I)))), zero(T)) +end diff --git a/NDTensors/src/lib/BlockSparseArrays/test/Project.toml b/NDTensors/src/lib/BlockSparseArrays/test/Project.toml index 0849fc085d..0946bbaa98 100644 --- a/NDTensors/src/lib/BlockSparseArrays/test/Project.toml +++ b/NDTensors/src/lib/BlockSparseArrays/test/Project.toml @@ -1,3 +1,4 @@ [deps] BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e" Compat = "34da2185-b29b-5c13-b0c7-acf172513d20" +NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf" diff --git a/NDTensors/src/lib/BlockSparseArrays/test/backup/runtests.jl b/NDTensors/src/lib/BlockSparseArrays/test/backup/runtests.jl new file mode 100644 index 0000000000..661d4b6b8e --- /dev/null +++ b/NDTensors/src/lib/BlockSparseArrays/test/backup/runtests.jl @@ -0,0 +1,147 @@ +@eval module $(gensym()) +using Test: @test, @testset +using NDTensors.BlockSparseArrays: BlockSparseArray, block_nstored +using BlockArrays: BlockedUnitRange, blockedrange, blocklength, blocksize +## using Compat: allequal +## using LinearAlgebra: Diagonal, Hermitian, eigen, qr +## using NDTensors: contract +## using NDTensors.BlockSparseArrays: +## BlockSparseArrays, BlockSparseArray, gradedrange, nonzero_blockkeys, fusedims +## using ITensors: QN + +include("TestBlockSparseArraysUtils.jl") + +@testset "BlockSparseArrays (eltype=$elt)" for elt in + (Float32, Float64, ComplexF32, ComplexF64) + @testset "Basics" begin + a = BlockSparseArray{elt}([2, 3], [2, 3]) + @test eltype(a) === elt + @test axes(a) == (1:5, 1:5) + @test all(aᵢ -> aᵢ isa BlockedUnitRange, axes(a)) + @test blocklength.(axes(a)) == (2, 2) + @test blocksize(a) == (2, 2) + @test size(a) == (5, 5) + @test block_nstored(a) == 0 + @test iszero(a) + @test 
all(I -> iszero(a[I]), eachindex(a)) + + a = BlockSparseArray{elt}([2, 3], [2, 3]) + a[3, 3] = 33 + @test eltype(a) === elt + @test axes(a) == (1:5, 1:5) + @test all(aᵢ -> aᵢ isa BlockedUnitRange, axes(a)) + @test blocklength.(axes(a)) == (2, 2) + @test blocksize(a) == (2, 2) + @test size(a) == (5, 5) + @test block_nstored(a) == 1 + @test !iszero(a) + @test a[3, 3] == 33 + @test all(eachindex(a)) do I + if I == CartesianIndex(3, 3) + a[I] == 33 + else + iszero(a[I]) + end + end + end +end + +## ## @testset "README" begin +## ## @test include( +## ## joinpath( +## ## pkgdir(BlockSparseArrays), +## ## "src", +## ## "lib", +## ## "BlockSparseArrays", +## ## "examples", +## ## "README.jl", +## ## ), +## ## ) isa Any +## ## end +## @testset "exports $s" for s in [:BlockSparseArray] +## @test Base.isexported(BlockSparseArrays, s) +## end +## ## @testset "Mixed block test" begin +## ## blocks = [BlockArrays.Block(1, 1), BlockArrays.Block(2, 2)] +## ## block_data = [randn(2, 2)', randn(3, 3)] +## ## inds = ([2, 3], [2, 3]) +## ## A = BlockSparseArray(blocks, block_data, inds) +## ## @test A[BlockArrays.Block(1, 1)] == block_data[1] +## ## @test A[BlockArrays.Block(1, 2)] == zeros(2, 3) +## ## end +## ## TODO: Move to a `BlockSparseArraysGradedAxesExt` +## ## module. Also get it working. +## ## @testset "fusedims" begin +## ## elt = Float64 +## ## d = [2, 3] +## ## sectors = [QN(0, 2), QN(1, 2)] +## ## i = gradedrange(d, sectors) + +## ## B = BlockSparseArray{elt}(i, i, i, i) +## ## B[BlockArrays.Block(1, 1, 1, 1)] = randn(2, 2, 2, 2) +## ## B[BlockArrays.Block(2, 2, 2, 2)] = randn(3, 3, 3, 3) +## ## @test size(B) == (5, 5, 5, 5) +## ## @test blocksize(B) == (2, 2, 2, 2) +## ## # TODO: Define `nnz`. 
+## ## @test length(nonzero_blockkeys(B)) == 2 + +## ## B_sub = B[ +## ## [BlockArrays.Block(2)], +## ## [BlockArrays.Block(2)], +## ## [BlockArrays.Block(2)], +## ## [BlockArrays.Block(2)], +## ## ] +## ## @test B_sub isa BlockSparseArray{elt,4} +## ## @test B[BlockArrays.Block(2, 2, 2, 2)] == B_sub[BlockArrays.Block(1, 1, 1, 1)] +## ## @test size(B_sub) == (3, 3, 3, 3) +## ## @test blocksize(B_sub) == (1, 1, 1, 1) +## ## # TODO: Define `nnz`. +## ## @test length(nonzero_blockkeys(B_sub)) == 1 + +## ## B_fused = fusedims(B, (1, 2), (3, 4)) +## ## @test B_fused isa BlockSparseArray{elt,2} +## ## @test size(B_fused) == (25, 25) +## ## @test blocksize(B_fused) == (2, 2) +## ## # TODO: Define `nnz`. +## ## # This is broken because it allocates all blocks, +## ## # need to fix that. +## ## @test_broken length(nonzero_blockkeys(B_fused)) == 2 +## ## end + +## ## TODO: Move this to `BlockSparseArraysTensorAlgebraExt`. +## ## Also move most of the `contract` logic to +## ## `SparseArrayInterfaceTensorAlgebraExt`. +## @testset "contract (eltype=$elt)" for elt in (Float32, ComplexF32, Float64, ComplexF64) +## d1, d2, d3, d4 = [2, 3], [3, 4], [4, 5], [5, 6] +## a1 = BlockSparseArray{elt}(d1, d2, d3) +## TestBlockSparseArraysUtils.set_blocks!(a1, randn, b -> iseven(sum(b.n))) +## a2 = BlockSparseArray{elt}(d2, d4) +## TestBlockSparseArraysUtils.set_blocks!(a2, randn, b -> iseven(sum(b.n))) +## a_dest, labels_dest = contract(a1, (1, -1, 2), a2, (-1, 3)) +## @test labels_dest == (1, 2, 3) +## @test eltype(a_dest) == elt +## # TODO: Output `labels_dest` as well. 
+## a_dest_dense = contract(Array(a1), (1, -1, 2), Array(a2), (-1, 3)) +## @test a_dest ≈ a_dest_dense +## end +## @testset "qr (eltype=$elt, dims=$d)" for elt in +## (Float32, ComplexF32, Float64, ComplexF64), +## d in (([3, 4], [2, 3]), ([2, 3], [3, 4]), ([2, 3], [3]), ([3, 4], [2]), ([2], [3, 4])) + +## @test_broken error() +## # a = BlockSparseArray{elt}(d) +## # TestBlockSparseArraysUtils.set_blocks!(a, randn, b -> iseven(sum(b.n))) +## # q, r = qr(a) +## # @test q * r ≈ a +## end +## @testset "eigen (eltype=$elt)" for elt in (Float32, ComplexF32, Float64, ComplexF64) +## @test_broken error() +## # d1, d2 = [2, 3], [2, 3] +## # a = BlockSparseArray{elt}(d1, d2) +## # TestBlockSparseArraysUtils.set_blocks!(a, randn, b -> allequal(b.n)) +## # d, u = eigen(Hermitian(a)) +## # @test eltype(d) == real(elt) +## # @test eltype(u) == elt +## # @test Hermitian(Matrix(a)) * Matrix(u) ≈ Matrix(u) * Diagonal(Vector(d)) +## end +end diff --git a/NDTensors/src/lib/BlockSparseArrays/test/runtests.jl b/NDTensors/src/lib/BlockSparseArrays/test/runtests.jl index 9f8bc253e3..9cde778ee9 100644 --- a/NDTensors/src/lib/BlockSparseArrays/test/runtests.jl +++ b/NDTensors/src/lib/BlockSparseArrays/test/runtests.jl @@ -1,105 +1,40 @@ @eval module $(gensym()) -using Test: @test, @testset, @test_broken -using BlockArrays: BlockArrays, BlockRange, blocksize -using Compat: allequal -using LinearAlgebra: Diagonal, Hermitian, eigen, qr -using NDTensors: contract -using NDTensors.BlockSparseArrays: - BlockSparseArrays, BlockSparseArray, gradedrange, nonzero_blockkeys, fusedims -using ITensors: QN - +using Test: @test, @testset +using NDTensors.BlockSparseArrays: BlockSparseArray, block_nstored +using BlockArrays: BlockedUnitRange, blockedrange, blocklength, blocksize include("TestBlockSparseArraysUtils.jl") +@testset "BlockSparseArrays (eltype=$elt)" for elt in + (Float32, Float64, ComplexF32, ComplexF64) + @testset "Basics" begin + a = BlockSparseArray{elt}([2, 3], [2, 3]) + @test eltype(a) 
=== elt + @test axes(a) == (1:5, 1:5) + @test all(aᵢ -> aᵢ isa BlockedUnitRange, axes(a)) + @test blocklength.(axes(a)) == (2, 2) + @test blocksize(a) == (2, 2) + @test size(a) == (5, 5) + @test block_nstored(a) == 0 + @test iszero(a) + @test all(I -> iszero(a[I]), eachindex(a)) -@testset "Test NDTensors.BlockSparseArrays" begin - @testset "README" begin - @test include( - joinpath( - pkgdir(BlockSparseArrays), - "src", - "lib", - "BlockSparseArrays", - "examples", - "README.jl", - ), - ) isa Any - end - @testset "exports $s" for s in [:BlockSparseArray] - @test Base.isexported(BlockSparseArrays, s) - end - @testset "Mixed block test" begin - blocks = [BlockArrays.Block(1, 1), BlockArrays.Block(2, 2)] - block_data = [randn(2, 2)', randn(3, 3)] - inds = ([2, 3], [2, 3]) - A = BlockSparseArray(blocks, block_data, inds) - @test A[BlockArrays.Block(1, 1)] == block_data[1] - @test A[BlockArrays.Block(1, 2)] == zeros(2, 3) - end - @testset "fusedims" begin - elt = Float64 - d = [2, 3] - sectors = [QN(0, 2), QN(1, 2)] - i = gradedrange(d, sectors) - - B = BlockSparseArray{elt}(i, i, i, i) - B[BlockArrays.Block(1, 1, 1, 1)] = randn(2, 2, 2, 2) - B[BlockArrays.Block(2, 2, 2, 2)] = randn(3, 3, 3, 3) - @test size(B) == (5, 5, 5, 5) - @test blocksize(B) == (2, 2, 2, 2) - # TODO: Define `nnz`. - @test length(nonzero_blockkeys(B)) == 2 - - B_sub = B[ - [BlockArrays.Block(2)], - [BlockArrays.Block(2)], - [BlockArrays.Block(2)], - [BlockArrays.Block(2)], - ] - @test B_sub isa BlockSparseArray{elt,4} - @test B[BlockArrays.Block(2, 2, 2, 2)] == B_sub[BlockArrays.Block(1, 1, 1, 1)] - @test size(B_sub) == (3, 3, 3, 3) - @test blocksize(B_sub) == (1, 1, 1, 1) - # TODO: Define `nnz`. - @test length(nonzero_blockkeys(B_sub)) == 1 - - B_fused = fusedims(B, (1, 2), (3, 4)) - @test B_fused isa BlockSparseArray{elt,2} - @test size(B_fused) == (25, 25) - @test blocksize(B_fused) == (2, 2) - # TODO: Define `nnz`. - # This is broken because it allocates all blocks, - # need to fix that. 
- @test_broken length(nonzero_blockkeys(B_fused)) == 2 - end - @testset "contract (eltype=$elt)" for elt in (Float32, ComplexF32, Float64, ComplexF64) - d1, d2, d3, d4 = [2, 3], [3, 4], [4, 5], [5, 6] - a1 = BlockSparseArray{elt}(d1, d2, d3) - TestBlockSparseArraysUtils.set_blocks!(a1, randn, b -> iseven(sum(b.n))) - a2 = BlockSparseArray{elt}(d2, d4) - TestBlockSparseArraysUtils.set_blocks!(a2, randn, b -> iseven(sum(b.n))) - a_dest, labels_dest = contract(a1, (1, -1, 2), a2, (-1, 3)) - @test labels_dest == (1, 2, 3) - @test eltype(a_dest) == elt - # TODO: Output `labels_dest` as well. - a_dest_dense = contract(Array(a1), (1, -1, 2), Array(a2), (-1, 3)) - @test a_dest ≈ a_dest_dense - end - @testset "qr (eltype=$elt, dims=$d)" for elt in - (Float32, ComplexF32, Float64, ComplexF64), - d in (([3, 4], [2, 3]), ([2, 3], [3, 4]), ([2, 3], [3]), ([3, 4], [2]), ([2], [3, 4])) - - a = BlockSparseArray{elt}(d) - TestBlockSparseArraysUtils.set_blocks!(a, randn, b -> iseven(sum(b.n))) - q, r = qr(a) - @test q * r ≈ a - end - @testset "eigen (eltype=$elt)" for elt in (Float32, ComplexF32, Float64, ComplexF64) - d1, d2 = [2, 3], [2, 3] - a = BlockSparseArray{elt}(d1, d2) - TestBlockSparseArraysUtils.set_blocks!(a, randn, b -> allequal(b.n)) - d, u = eigen(Hermitian(a)) - @test eltype(d) == real(elt) - @test eltype(u) == elt - @test Hermitian(Matrix(a)) * Matrix(u) ≈ Matrix(u) * Diagonal(Vector(d)) + a = BlockSparseArray{elt}([2, 3], [2, 3]) + a[3, 3] = 33 + @test eltype(a) === elt + @test axes(a) == (1:5, 1:5) + @test all(aᵢ -> aᵢ isa BlockedUnitRange, axes(a)) + @test blocklength.(axes(a)) == (2, 2) + @test blocksize(a) == (2, 2) + @test size(a) == (5, 5) + @test block_nstored(a) == 1 + @test !iszero(a) + @test a[3, 3] == 33 + @test all(eachindex(a)) do I + if I == CartesianIndex(3, 3) + a[I] == 33 + else + iszero(a[I]) + end + end end end end diff --git a/NDTensors/src/lib/DiagonalArrays/src/DiagonalArrays.jl b/NDTensors/src/lib/DiagonalArrays/src/DiagonalArrays.jl index 
76335ef77a..3b8e5ff9a4 100644 --- a/NDTensors/src/lib/DiagonalArrays/src/DiagonalArrays.jl +++ b/NDTensors/src/lib/DiagonalArrays/src/DiagonalArrays.jl @@ -1,11 +1,12 @@ module DiagonalArrays -include("defaults.jl") -include("diaginterface.jl") -include("diagindex.jl") -include("diagindices.jl") -include("diagonalarray.jl") -include("sparsearrayinterface.jl") -include("diagonalarraydiaginterface.jl") -include("diagonalmatrix.jl") -include("diagonalvector.jl") +include("diaginterface/defaults.jl") +include("diaginterface/diaginterface.jl") +include("diaginterface/diagindex.jl") +include("diaginterface/diagindices.jl") +include("abstractdiagonalarray/abstractdiagonalarray.jl") +include("abstractdiagonalarray/sparsearrayinterface.jl") +include("abstractdiagonalarray/diagonalarraydiaginterface.jl") +include("diagonalarray/diagonalarray.jl") +include("diagonalarray/diagonalmatrix.jl") +include("diagonalarray/diagonalvector.jl") end diff --git a/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/abstractdiagonalarray.jl b/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/abstractdiagonalarray.jl new file mode 100644 index 0000000000..b6131450cf --- /dev/null +++ b/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/abstractdiagonalarray.jl @@ -0,0 +1,3 @@ +using ..SparseArrayInterface: AbstractSparseArray + +abstract type AbstractDiagonalArray{T,N} <: AbstractSparseArray{T,N} end diff --git a/NDTensors/src/lib/DiagonalArrays/src/diagonalarraydiaginterface.jl b/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/diagonalarraydiaginterface.jl similarity index 59% rename from NDTensors/src/lib/DiagonalArrays/src/diagonalarraydiaginterface.jl rename to NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/diagonalarraydiaginterface.jl index 3b6c17bfae..5f76a6abe0 100644 --- a/NDTensors/src/lib/DiagonalArrays/src/diagonalarraydiaginterface.jl +++ b/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/diagonalarraydiaginterface.jl @@ 
-2,22 +2,22 @@ using ..SparseArrayInterface: SparseArrayInterface, StorageIndex, StorageIndices SparseArrayInterface.StorageIndex(i::DiagIndex) = StorageIndex(index(i)) -function Base.getindex(a::DiagonalArray, i::DiagIndex) +function Base.getindex(a::AbstractDiagonalArray, i::DiagIndex) return a[StorageIndex(i)] end -function Base.setindex!(a::DiagonalArray, value, i::DiagIndex) +function Base.setindex!(a::AbstractDiagonalArray, value, i::DiagIndex) a[StorageIndex(i)] = value return a end SparseArrayInterface.StorageIndices(i::DiagIndices) = StorageIndices(indices(i)) -function Base.getindex(a::DiagonalArray, i::DiagIndices) +function Base.getindex(a::AbstractDiagonalArray, i::DiagIndices) return a[StorageIndices(i)] end -function Base.setindex!(a::DiagonalArray, value, i::DiagIndices) +function Base.setindex!(a::AbstractDiagonalArray, value, i::DiagIndices) a[StorageIndices(i)] = value return a end diff --git a/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/sparsearrayinterface.jl b/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/sparsearrayinterface.jl new file mode 100644 index 0000000000..32036920a1 --- /dev/null +++ b/NDTensors/src/lib/DiagonalArrays/src/abstractdiagonalarray/sparsearrayinterface.jl @@ -0,0 +1,22 @@ +using Compat: Returns, allequal +using ..SparseArrayInterface: SparseArrayInterface + +# `SparseArrayInterface` interface +function SparseArrayInterface.index_to_storage_index( + a::AbstractDiagonalArray{<:Any,N}, I::CartesianIndex{N} +) where {N} + !allequal(Tuple(I)) && return nothing + return first(Tuple(I)) +end + +function SparseArrayInterface.storage_index_to_index(a::AbstractDiagonalArray, I) + return CartesianIndex(ntuple(Returns(I), ndims(a))) +end + +## # 1-dimensional case can be `AbstractDiagonalArray`. +## function SparseArrayInterface.sparse_similar( +## a::AbstractDiagonalArray, elt::Type, dims::Tuple{Int} +## ) +## # TODO: Handle preserving zero element function. 
+## return similar(a, elt, dims) +## end diff --git a/NDTensors/src/lib/DiagonalArrays/src/defaults.jl b/NDTensors/src/lib/DiagonalArrays/src/diaginterface/defaults.jl similarity index 100% rename from NDTensors/src/lib/DiagonalArrays/src/defaults.jl rename to NDTensors/src/lib/DiagonalArrays/src/diaginterface/defaults.jl diff --git a/NDTensors/src/lib/DiagonalArrays/src/diagindex.jl b/NDTensors/src/lib/DiagonalArrays/src/diaginterface/diagindex.jl similarity index 100% rename from NDTensors/src/lib/DiagonalArrays/src/diagindex.jl rename to NDTensors/src/lib/DiagonalArrays/src/diaginterface/diagindex.jl diff --git a/NDTensors/src/lib/DiagonalArrays/src/diagindices.jl b/NDTensors/src/lib/DiagonalArrays/src/diaginterface/diagindices.jl similarity index 100% rename from NDTensors/src/lib/DiagonalArrays/src/diagindices.jl rename to NDTensors/src/lib/DiagonalArrays/src/diaginterface/diagindices.jl diff --git a/NDTensors/src/lib/DiagonalArrays/src/diaginterface.jl b/NDTensors/src/lib/DiagonalArrays/src/diaginterface/diaginterface.jl similarity index 100% rename from NDTensors/src/lib/DiagonalArrays/src/diaginterface.jl rename to NDTensors/src/lib/DiagonalArrays/src/diaginterface/diaginterface.jl diff --git a/NDTensors/src/lib/DiagonalArrays/src/diagonalarray.jl b/NDTensors/src/lib/DiagonalArrays/src/diagonalarray/diagonalarray.jl similarity index 54% rename from NDTensors/src/lib/DiagonalArrays/src/diagonalarray.jl rename to NDTensors/src/lib/DiagonalArrays/src/diagonalarray/diagonalarray.jl index 13dbcc59f4..5ca9500899 100644 --- a/NDTensors/src/lib/DiagonalArrays/src/diagonalarray.jl +++ b/NDTensors/src/lib/DiagonalArrays/src/diagonalarray/diagonalarray.jl @@ -1,6 +1,8 @@ -using ..SparseArrayInterface: Zero +using ..SparseArrayInterface: Zero, getindex_zero_function +# TODO: Put into `DiagonalArraysSparseArrayDOKsExt`? 
+using ..SparseArrayDOKs: SparseArrayDOKs, SparseArrayDOK -struct DiagonalArray{T,N,Diag<:AbstractVector{T},Zero} <: AbstractArray{T,N} +struct DiagonalArray{T,N,Diag<:AbstractVector{T},Zero} <: AbstractDiagonalArray{T,N} diag::Diag dims::NTuple{N,Int} zero::Zero @@ -50,18 +52,48 @@ function DiagonalArray{<:Any,N}(diag::AbstractVector{T}) where {T,N} end # undef -function DiagonalArray{T,N}(::UndefInitializer, d::Tuple{Vararg{Int,N}}) where {T,N} - return DiagonalArray{T,N}(Vector{T}(undef, minimum(d)), d) +function DiagonalArray{T,N}( + ::UndefInitializer, d::Tuple{Vararg{Int,N}}, zero=Zero() +) where {T,N} + return DiagonalArray{T,N}(Vector{T}(undef, minimum(d)), d, zero) end function DiagonalArray{T,N}(::UndefInitializer, d::Vararg{Int,N}) where {T,N} return DiagonalArray{T,N}(undef, d) end -function DiagonalArray{T}(::UndefInitializer, d::Tuple{Vararg{Int,N}}) where {T,N} - return DiagonalArray{T,N}(undef, d) +function DiagonalArray{T}( + ::UndefInitializer, d::Tuple{Vararg{Int,N}}, zero=Zero() +) where {T,N} + return DiagonalArray{T,N}(undef, d, zero) end function DiagonalArray{T}(::UndefInitializer, d::Vararg{Int,N}) where {T,N} return DiagonalArray{T,N}(undef, d) end + +# Minimal `AbstractArray` interface +Base.size(a::DiagonalArray) = a.dims + +function Base.similar(a::DiagonalArray, elt::Type, dims::Tuple{Vararg{Int}}) + # TODO: Preserve zero element function. + return DiagonalArray{elt}(undef, dims, getindex_zero_function(a)) +end + +# Minimal `SparseArrayInterface` interface +SparseArrayInterface.sparse_storage(a::DiagonalArray) = a.diag + +# `SparseArrayInterface` +# Defines similar when the output can't be `DiagonalArray`, +# such as in `reshape`. +# TODO: Put into `DiagonalArraysSparseArrayDOKsExt`? +# TODO: Special case 2D to output `SparseMatrixCSC`? 
+function SparseArrayInterface.sparse_similar( + a::DiagonalArray, elt::Type, dims::Tuple{Vararg{Int}} +) + return SparseArrayDOK{elt}(undef, dims, getindex_zero_function(a)) +end + +function SparseArrayInterface.getindex_zero_function(a::DiagonalArray) + return a.zero +end diff --git a/NDTensors/src/lib/DiagonalArrays/src/diagonalmatrix.jl b/NDTensors/src/lib/DiagonalArrays/src/diagonalarray/diagonalmatrix.jl similarity index 100% rename from NDTensors/src/lib/DiagonalArrays/src/diagonalmatrix.jl rename to NDTensors/src/lib/DiagonalArrays/src/diagonalarray/diagonalmatrix.jl diff --git a/NDTensors/src/lib/DiagonalArrays/src/diagonalvector.jl b/NDTensors/src/lib/DiagonalArrays/src/diagonalarray/diagonalvector.jl similarity index 100% rename from NDTensors/src/lib/DiagonalArrays/src/diagonalvector.jl rename to NDTensors/src/lib/DiagonalArrays/src/diagonalarray/diagonalvector.jl diff --git a/NDTensors/src/lib/DiagonalArrays/src/sparsearrayinterface.jl b/NDTensors/src/lib/DiagonalArrays/src/sparsearrayinterface.jl deleted file mode 100644 index bbaed1be99..0000000000 --- a/NDTensors/src/lib/DiagonalArrays/src/sparsearrayinterface.jl +++ /dev/null @@ -1,71 +0,0 @@ -using Compat: Returns, allequal -using ..SparseArrayInterface: SparseArrayInterface -# TODO: Put into `DiagonalArraysSparseArrayDOKsExt`? -using ..SparseArrayDOKs: SparseArrayDOK - -# Minimal interface -SparseArrayInterface.storage(a::DiagonalArray) = a.diag - -function SparseArrayInterface.index_to_storage_index( - a::DiagonalArray{<:Any,N}, I::CartesianIndex{N} -) where {N} - !allequal(Tuple(I)) && return nothing - return first(Tuple(I)) -end - -function SparseArrayInterface.storage_index_to_index(a::DiagonalArray, I) - return CartesianIndex(ntuple(Returns(I), ndims(a))) -end - -# Defines similar when the output can't be `DiagonalArray`, -# such as in `reshape`. -# TODO: Put into `DiagonalArraysSparseArrayDOKsExt`? -# TODO: Special case 2D to output `SparseMatrixCSC`? 
-function SparseArrayInterface.sparse_similar( - a::DiagonalArray, elt::Type, dims::Tuple{Vararg{Int}} -) - return SparseArrayDOK{elt}(undef, dims) -end - -# 1-dimensional case can be `DiagonalArray`. -function SparseArrayInterface.sparse_similar(a::DiagonalArray, elt::Type, dims::Tuple{Int}) - return similar(a, elt, dims) -end - -# AbstractArray interface -Base.size(a::DiagonalArray) = a.dims - -function Base.getindex(a::DiagonalArray, I...) - return SparseArrayInterface.sparse_getindex(a, I...) -end - -function Base.setindex!(a::DiagonalArray, I...) - return SparseArrayInterface.sparse_setindex!(a, I...) -end - -function Base.similar(a::DiagonalArray, elt::Type, dims::Tuple{Vararg{Int}}) - return DiagonalArray{elt}(undef, dims) -end - -# AbstractArray functionality -# broadcast -function Broadcast.BroadcastStyle(arraytype::Type{<:DiagonalArray}) - return SparseArrayInterface.SparseArrayStyle{ndims(arraytype)}() -end - -# map -function Base.map!(f, dest::AbstractArray, src::DiagonalArray) - SparseArrayInterface.sparse_map!(f, dest, src) - return dest -end - -# permutedims -function Base.permutedims!(dest::AbstractArray, src::DiagonalArray, perm) - SparseArrayInterface.sparse_permutedims!(dest, src, perm) - return dest -end - -# reshape -function Base.reshape(a::DiagonalArray, dims::Tuple{Vararg{Int}}) - return SparseArrayInterface.sparse_reshape(a, dims) -end diff --git a/NDTensors/src/lib/GradedAxes/test/runtests.jl b/NDTensors/src/lib/GradedAxes/test/runtests.jl index 354d54aa63..4ff9e23382 100644 --- a/NDTensors/src/lib/GradedAxes/test/runtests.jl +++ b/NDTensors/src/lib/GradedAxes/test/runtests.jl @@ -1,5 +1,5 @@ @eval module $(gensym()) -using NDTensors.BlockArrays: Block, blocklength, blocklengths, findblock +using BlockArrays: Block, blocklength, blocklengths, findblock using NDTensors.GradedAxes: GradedAxes, blockmerge, diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/SparseArrayDOKs.jl b/NDTensors/src/lib/SparseArrayDOKs/src/SparseArrayDOKs.jl index 
697f145ff6..2157c59213 100644 --- a/NDTensors/src/lib/SparseArrayDOKs/src/SparseArrayDOKs.jl +++ b/NDTensors/src/lib/SparseArrayDOKs/src/SparseArrayDOKs.jl @@ -1,11 +1,4 @@ module SparseArrayDOKs include("defaults.jl") include("sparsearraydok.jl") -include("sparsearrayinterface.jl") -include("base.jl") -include("broadcast.jl") -include("map.jl") -include("baseinterface.jl") -include("conversion.jl") -include("SparseArrayDOKsSparseArraysExt.jl") end diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/baseinterface.jl b/NDTensors/src/lib/SparseArrayDOKs/src/baseinterface.jl deleted file mode 100644 index bc486df96c..0000000000 --- a/NDTensors/src/lib/SparseArrayDOKs/src/baseinterface.jl +++ /dev/null @@ -1,15 +0,0 @@ -using ..SparseArrayInterface: SparseArrayInterface - -Base.size(a::SparseArrayDOK) = a.dims - -function Base.getindex(a::SparseArrayDOK, I...) - return SparseArrayInterface.sparse_getindex(a, I...) -end - -function Base.setindex!(a::SparseArrayDOK, I...) - return SparseArrayInterface.sparse_setindex!(a, I...) 
-end - -function Base.similar(a::SparseArrayDOK, elt::Type, dims::Tuple{Vararg{Int}}) - return SparseArrayDOK{elt}(undef, dims) -end diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/conversion.jl b/NDTensors/src/lib/SparseArrayDOKs/src/conversion.jl deleted file mode 100644 index b2f50aed31..0000000000 --- a/NDTensors/src/lib/SparseArrayDOKs/src/conversion.jl +++ /dev/null @@ -1,19 +0,0 @@ -function SparseArrayDOK{T,N,Zero}(a::SparseArrayDOK{T,N,Zero}) where {T,N,Zero} - return copy(a) -end - -function Base.convert( - ::Type{SparseArrayDOK{T,N,Zero}}, a::SparseArrayDOK{T,N,Zero} -) where {T,N,Zero} - return a -end - -Base.convert(type::Type{<:SparseArrayDOK}, a::AbstractArray) = type(a) - -SparseArrayDOK(a::AbstractArray) = SparseArrayDOK{eltype(a)}(a) - -SparseArrayDOK{T}(a::AbstractArray) where {T} = SparseArrayDOK{T,ndims(a)}(a) - -function SparseArrayDOK{T,N}(a::AbstractArray) where {T,N} - return SparseArrayInterface.sparse_convert(SparseArrayDOK{T,N}, a) -end diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/defaults.jl b/NDTensors/src/lib/SparseArrayDOKs/src/defaults.jl index 044341e8d4..074f4c940f 100644 --- a/NDTensors/src/lib/SparseArrayDOKs/src/defaults.jl +++ b/NDTensors/src/lib/SparseArrayDOKs/src/defaults.jl @@ -1,3 +1,4 @@ +using Dictionaries: Dictionary using ..SparseArrayInterface: Zero default_zero() = Zero() diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/sparsearraydok.jl b/NDTensors/src/lib/SparseArrayDOKs/src/sparsearraydok.jl index 9805e1a5f0..71aafd4f12 100644 --- a/NDTensors/src/lib/SparseArrayDOKs/src/sparsearraydok.jl +++ b/NDTensors/src/lib/SparseArrayDOKs/src/sparsearraydok.jl @@ -1,7 +1,9 @@ using Dictionaries: Dictionary +using ..SparseArrayInterface: + SparseArrayInterface, AbstractSparseArray, getindex_zero_function # TODO: Parametrize by `data`? 
-struct SparseArrayDOK{T,N,Zero} <: AbstractArray{T,N} +struct SparseArrayDOK{T,N,Zero} <: AbstractSparseArray{T,N} data::Dictionary{CartesianIndex{N},T} dims::NTuple{N,Int} zero::Zero @@ -59,3 +61,27 @@ end function SparseArrayDOK{T}(::UndefInitializer, dims::Tuple{Vararg{Int}}, zero) where {T} return SparseArrayDOK{T}(dims, zero) end + +# Base `AbstractArray` interface +Base.size(a::SparseArrayDOK) = a.dims + +SparseArrayInterface.getindex_zero_function(a::SparseArrayDOK) = a.zero + +function Base.similar(a::SparseArrayDOK, elt::Type, dims::Tuple{Vararg{Int}}) + return SparseArrayDOK{elt}(undef, dims, getindex_zero_function(a)) +end + +# `SparseArrayInterface` interface +SparseArrayInterface.sparse_storage(a::SparseArrayDOK) = a.data + +function SparseArrayInterface.dropall!(a::SparseArrayDOK) + return empty!(SparseArrayInterface.sparse_storage(a)) +end + +SparseArrayDOK(a::AbstractArray) = SparseArrayDOK{eltype(a)}(a) + +SparseArrayDOK{T}(a::AbstractArray) where {T} = SparseArrayDOK{T,ndims(a)}(a) + +function SparseArrayDOK{T,N}(a::AbstractArray) where {T,N} + return SparseArrayInterface.sparse_convert(SparseArrayDOK{T,N}, a) +end diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/sparsearrayinterface.jl b/NDTensors/src/lib/SparseArrayDOKs/src/sparsearrayinterface.jl deleted file mode 100644 index 594582f436..0000000000 --- a/NDTensors/src/lib/SparseArrayDOKs/src/sparsearrayinterface.jl +++ /dev/null @@ -1,22 +0,0 @@ -using Dictionaries: set! 
-using ..SparseArrayInterface: SparseArrayInterface - -SparseArrayInterface.storage(a::SparseArrayDOK) = a.data - -function SparseArrayInterface.index_to_storage_index( - a::SparseArrayDOK{<:Any,N}, I::CartesianIndex{N} -) where {N} - !isassigned(SparseArrayInterface.storage(a), I) && return nothing - return I -end - -function SparseArrayInterface.setindex_notstored!( - a::SparseArrayDOK{<:Any,N}, value, I::CartesianIndex{N} -) where {N} - set!(SparseArrayInterface.storage(a), I, value) - return a -end - -function SparseArrayInterface.empty_storage!(a::SparseArrayDOK) - return empty!(SparseArrayInterface.storage(a)) -end diff --git a/NDTensors/src/lib/SparseArrayDOKs/test/Project.toml b/NDTensors/src/lib/SparseArrayDOKs/test/Project.toml new file mode 100644 index 0000000000..9b1d5ccd25 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayDOKs/test/Project.toml @@ -0,0 +1,2 @@ +[deps] +NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf" diff --git a/NDTensors/src/lib/SparseArrayInterface/.JuliaFormatter.toml b/NDTensors/src/lib/SparseArrayInterface/.JuliaFormatter.toml new file mode 100644 index 0000000000..08f664cdb9 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/.JuliaFormatter.toml @@ -0,0 +1,2 @@ +style = "blue" +indent = 2 diff --git a/NDTensors/src/lib/SparseArrayInterface/src/SparseArrayInterface.jl b/NDTensors/src/lib/SparseArrayInterface/src/SparseArrayInterface.jl index 9468eaf17c..b2c4b6cf4f 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/SparseArrayInterface.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/SparseArrayInterface.jl @@ -1,14 +1,24 @@ module SparseArrayInterface -include("densearray.jl") -include("interface.jl") -include("interface_optional.jl") -include("indexing.jl") -include("base.jl") -include("map.jl") -include("copyto.jl") -include("broadcast.jl") -include("conversion.jl") -include("wrappers.jl") -include("zero.jl") -# include("SparseArrayInterfaceSparseArraysExt.jl") +include("sparsearrayinterface/densearray.jl") 
+include("sparsearrayinterface/vectorinterface.jl") +include("sparsearrayinterface/interface.jl") +include("sparsearrayinterface/interface_optional.jl") +include("sparsearrayinterface/indexing.jl") +include("sparsearrayinterface/base.jl") +include("sparsearrayinterface/map.jl") +include("sparsearrayinterface/copyto.jl") +include("sparsearrayinterface/broadcast.jl") +include("sparsearrayinterface/conversion.jl") +include("sparsearrayinterface/wrappers.jl") +include("sparsearrayinterface/zero.jl") +include("sparsearrayinterface/SparseArrayInterfaceLinearAlgebraExt.jl") +include("abstractsparsearray/abstractsparsearray.jl") +include("abstractsparsearray/sparsearrayinterface.jl") +include("abstractsparsearray/base.jl") +include("abstractsparsearray/broadcast.jl") +include("abstractsparsearray/map.jl") +include("abstractsparsearray/baseinterface.jl") +include("abstractsparsearray/convert.jl") +include("abstractsparsearray/SparseArrayInterfaceSparseArraysExt.jl") +include("abstractsparsearray/SparseArrayInterfaceLinearAlgebraExt.jl") end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/SparseArrayInterfaceSparseArraysExt.jl b/NDTensors/src/lib/SparseArrayInterface/src/SparseArrayInterfaceSparseArraysExt.jl deleted file mode 100644 index 0f2d5d6b7f..0000000000 --- a/NDTensors/src/lib/SparseArrayInterface/src/SparseArrayInterfaceSparseArraysExt.jl +++ /dev/null @@ -1,10 +0,0 @@ -## using SparseArrays: SparseArrays, SparseMatrixCSC -## -## # Julia Base `SparseArrays.AbstractSparseArray` interface -## # SparseMatrixCSC.nnz -## nnz(a::AbstractArray) = nonzero_length(a) -## -## # SparseArrayInterface.SparseMatrixCSC -## function SparseMatrixCSC(a::AbstractMatrix) -## return error("Not implemented") -## end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/SparseArrayInterfaceLinearAlgebraExt.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/SparseArrayInterfaceLinearAlgebraExt.jl new file mode 100644 index 
0000000000..6ba11d5a9a --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/SparseArrayInterfaceLinearAlgebraExt.jl @@ -0,0 +1,3 @@ +using LinearAlgebra: LinearAlgebra + +LinearAlgebra.norm(a::AbstractSparseArray, p::Real=2) = sparse_norm(a, p) diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/SparseArrayDOKsSparseArraysExt.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/SparseArrayInterfaceSparseArraysExt.jl similarity index 68% rename from NDTensors/src/lib/SparseArrayDOKs/src/SparseArrayDOKsSparseArraysExt.jl rename to NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/SparseArrayInterfaceSparseArraysExt.jl index 85d462ca78..5d9df56238 100644 --- a/NDTensors/src/lib/SparseArrayDOKs/src/SparseArrayDOKsSparseArraysExt.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/SparseArrayInterfaceSparseArraysExt.jl @@ -2,4 +2,4 @@ using SparseArrays: SparseArrays using ..SparseArrayInterface: nstored # Julia Base `AbstractSparseArray` interface -SparseArrays.nnz(a::SparseArrayDOK) = nstored(a) +SparseArrays.nnz(a::AbstractSparseArray) = nstored(a) diff --git a/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/abstractsparsearray.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/abstractsparsearray.jl new file mode 100644 index 0000000000..5a6ae67596 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/abstractsparsearray.jl @@ -0,0 +1 @@ +abstract type AbstractSparseArray{T,N} <: AbstractArray{T,N} end diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/base.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/base.jl similarity index 54% rename from NDTensors/src/lib/SparseArrayDOKs/src/base.jl rename to NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/base.jl index 11cc24d7a0..587571a060 100644 --- a/NDTensors/src/lib/SparseArrayDOKs/src/base.jl +++ 
b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/base.jl @@ -1,18 +1,18 @@ using ..SparseArrayInterface: SparseArrayInterface # Base -function Base.:(==)(a1::SparseArrayDOK, a2::SparseArrayDOK) +function Base.:(==)(a1::AbstractSparseArray, a2::AbstractSparseArray) return SparseArrayInterface.sparse_isequal(a1, a2) end -function Base.reshape(a::SparseArrayDOK, dims::Tuple{Vararg{Int}}) +function Base.reshape(a::AbstractSparseArray, dims::Tuple{Vararg{Int}}) return SparseArrayInterface.sparse_reshape(a, dims) end -function Base.zero(a::SparseArrayDOK) +function Base.zero(a::AbstractSparseArray) return SparseArrayInterface.sparse_zero(a) end -function Base.one(a::SparseArrayDOK) +function Base.one(a::AbstractSparseArray) return SparseArrayInterface.sparse_one(a) end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/baseinterface.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/baseinterface.jl new file mode 100644 index 0000000000..deb262c827 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/baseinterface.jl @@ -0,0 +1,15 @@ +using ..SparseArrayInterface: SparseArrayInterface + +Base.size(a::AbstractSparseArray) = error("Not implemented") + +function Base.similar(a::AbstractSparseArray, elt::Type, dims::Tuple{Vararg{Int}}) + return error("Not implemented") +end + +function Base.getindex(a::AbstractSparseArray, I...) + return SparseArrayInterface.sparse_getindex(a, I...) +end + +function Base.setindex!(a::AbstractSparseArray, I...) + return SparseArrayInterface.sparse_setindex!(a, I...) 
+end diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/broadcast.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/broadcast.jl similarity index 53% rename from NDTensors/src/lib/SparseArrayDOKs/src/broadcast.jl rename to NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/broadcast.jl index b7e17b41e0..152ddb2ab0 100644 --- a/NDTensors/src/lib/SparseArrayDOKs/src/broadcast.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/broadcast.jl @@ -1,4 +1,4 @@ # Broadcasting -function Broadcast.BroadcastStyle(arraytype::Type{<:SparseArrayDOK}) +function Broadcast.BroadcastStyle(arraytype::Type{<:AbstractSparseArray}) return SparseArrayInterface.SparseArrayStyle{ndims(arraytype)}() end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/convert.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/convert.jl new file mode 100644 index 0000000000..8b532ede73 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/convert.jl @@ -0,0 +1,7 @@ +Base.convert(type::Type{<:AbstractSparseArray}, a::AbstractArray) = type(a) + +Base.convert(::Type{T}, a::T) where {T<:AbstractSparseArray} = a + +function (::Type{T})(a::T) where {T<:AbstractSparseArray} + return copy(a) +end diff --git a/NDTensors/src/lib/SparseArrayDOKs/src/map.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/map.jl similarity index 56% rename from NDTensors/src/lib/SparseArrayDOKs/src/map.jl rename to NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/map.jl index b8b1656ac5..86eecc5dd0 100644 --- a/NDTensors/src/lib/SparseArrayDOKs/src/map.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/map.jl @@ -1,34 +1,34 @@ # Map -function Base.map!(f, dest::AbstractArray, src::SparseArrayDOK) +function Base.map!(f, dest::AbstractArray, src::AbstractSparseArray) SparseArrayInterface.sparse_map!(f, dest, src) return dest end -function 
Base.copy!(dest::AbstractArray, src::SparseArrayDOK) +function Base.copy!(dest::AbstractArray, src::AbstractSparseArray) SparseArrayInterface.sparse_copy!(dest, src) return dest end -function Base.copyto!(dest::AbstractArray, src::SparseArrayDOK) +function Base.copyto!(dest::AbstractArray, src::AbstractSparseArray) SparseArrayInterface.sparse_copyto!(dest, src) return dest end -function Base.permutedims!(dest::AbstractArray, src::SparseArrayDOK, perm) +function Base.permutedims!(dest::AbstractArray, src::AbstractSparseArray, perm) SparseArrayInterface.sparse_permutedims!(dest, src, perm) return dest end -function Base.mapreduce(f, op, a::SparseArrayDOK; kwargs...) +function Base.mapreduce(f, op, a::AbstractSparseArray; kwargs...) return SparseArrayInterface.sparse_mapreduce(f, op, a; kwargs...) end # TODO: Why isn't this calling `mapreduce` already? -function Base.iszero(a::SparseArrayDOK) +function Base.iszero(a::AbstractSparseArray) return SparseArrayInterface.sparse_iszero(a) end # TODO: Why isn't this calling `mapreduce` already? -function Base.isreal(a::SparseArrayDOK) +function Base.isreal(a::AbstractSparseArray) return SparseArrayInterface.sparse_isreal(a) end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/sparsearrayinterface.jl b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/sparsearrayinterface.jl new file mode 100644 index 0000000000..e28e92d488 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/src/abstractsparsearray/sparsearrayinterface.jl @@ -0,0 +1,18 @@ +using Dictionaries: set! 
+using ..SparseArrayInterface: SparseArrayInterface + +SparseArrayInterface.sparse_storage(::AbstractSparseArray) = error("Not implemented") + +function SparseArrayInterface.index_to_storage_index( + a::AbstractSparseArray{<:Any,N}, I::CartesianIndex{N} +) where {N} + !isassigned(SparseArrayInterface.sparse_storage(a), I) && return nothing + return I +end + +function SparseArrayInterface.setindex_notstored!( + a::AbstractSparseArray{<:Any,N}, value, I::CartesianIndex{N} +) where {N} + set!(SparseArrayInterface.sparse_storage(a), I, value) + return a +end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/backup/basics.jl b/NDTensors/src/lib/SparseArrayInterface/src/backup/basics.jl deleted file mode 100644 index 7c408a2afe..0000000000 --- a/NDTensors/src/lib/SparseArrayInterface/src/backup/basics.jl +++ /dev/null @@ -1,45 +0,0 @@ -# Also look into: -# https://juliaarrays.github.io/ArrayInterface.jl/stable/sparsearrays/ - -# Minimal interface -# Data structure storing the nonzero values -nonzeros(a::AbstractArray) = a - -# Minimal interface -# Map a `CartesianIndex` to an index into `nonzeros`. -nonzero_index(a::AbstractArray{<:Any,N}, I::CartesianIndex{N}) where {N} = I - -## # Required `SparseArrayInterface` interface. -## # https://github.com/Jutho/SparseArrayKit.jl interface functions -## nonzero_keys(a::AbstractArray) = error("Not implemented") -## nonzero_values(a::AbstractArray) = error("Not implemented") -## nonzero_pairs(a::AbstractArray) = error("Not implemented") -## -## # A dictionary-like structure -## # TODO: Rename `nonzeros`, `structural_nonzeros`, etc.? -## nonzero_structure(a::AbstractArray) = error("Not implemented") -## -## # Derived `SparseArrayInterface` interface. -## nonzero_length(a::AbstractArray) = length(nonzero_keys(a)) -## is_structural_nonzero(a::AbstractArray, I) = I ∈ nonzero_keys(a) -## -## # Overload if zero value is index dependent or -## # doesn't match element type. 
-## getindex_nonzero(a::AbstractArray, I) = nonzero_structure(a)[I] -## getindex_zero(a::AbstractArray, I) = zero(eltype(a)) -## function setindex_zero!(a::AbstractArray, value, I) -## # TODO: This may need to be modified. -## nonzero_structure(a)[I] = value -## return a -## end -## function setindex_nonzero!(a::AbstractArray, value, I) -## nonzero_structure(a)[I] = value -## return a -## end -## -## struct Zero end -## (::Zero)(type, I) = zero(type) -## -## default_zero() = Zero() # (eltype, I) -> zero(eltype) -## default_keytype(ndims::Int) = CartesianIndex{ndims} -## default_data(type::Type, ndims::Int) = Dictionary{default_keytype(ndims),type}() diff --git a/NDTensors/src/lib/SparseArrayInterface/src/backup/broadcast.jl b/NDTensors/src/lib/SparseArrayInterface/src/backup/broadcast.jl deleted file mode 100644 index c539aeb6b3..0000000000 --- a/NDTensors/src/lib/SparseArrayInterface/src/backup/broadcast.jl +++ /dev/null @@ -1,38 +0,0 @@ -using Base.Broadcast: BroadcastStyle, AbstractArrayStyle, DefaultArrayStyle, Broadcasted -using ..BroadcastMapConversion: map_function, map_args - -struct SparseArrayDOKStyle{N} <: AbstractArrayStyle{N} end - -function Broadcast.BroadcastStyle(::Type{<:SparseArrayDOK{<:Any,N}}) where {N} - return SparseArrayDOKStyle{N}() -end - -SparseArrayDOKStyle(::Val{N}) where {N} = SparseArrayDOKStyle{N}() -SparseArrayDOKStyle{M}(::Val{N}) where {M,N} = SparseArrayDOKStyle{N}() - -Broadcast.BroadcastStyle(a::SparseArrayDOKStyle, ::DefaultArrayStyle{0}) = a -function Broadcast.BroadcastStyle(::SparseArrayDOKStyle{N}, a::DefaultArrayStyle) where {N} - return BroadcastStyle(DefaultArrayStyle{N}(), a) -end -function Broadcast.BroadcastStyle( - ::SparseArrayDOKStyle{N}, ::Broadcast.Style{Tuple} -) where {N} - return DefaultArrayStyle{N}() -end - -# TODO: Use `allocate_output`, share logic with `map`. -function Base.similar(bc::Broadcasted{<:SparseArrayDOKStyle}, elt::Type) - # TODO: Is this a good definition? 
Probably should check that - # they have consistent axes. - return similar(first(map_args(bc)), elt) -end - -# Broadcasting implementation -function Base.copyto!( - dest::SparseArrayDOK{<:Any,N}, bc::Broadcasted{SparseArrayDOKStyle{N}} -) where {N} - # convert to map - # flatten and only keep the AbstractArray arguments - map!(map_function(bc), dest, map_args(bc)...) - return dest -end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/SparseArrayInterfaceLinearAlgebraExt.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/SparseArrayInterfaceLinearAlgebraExt.jl new file mode 100644 index 0000000000..db9bda209d --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/SparseArrayInterfaceLinearAlgebraExt.jl @@ -0,0 +1,3 @@ +using LinearAlgebra: norm + +sparse_norm(a::AbstractArray, p::Real=2) = norm(sparse_storage(a), p) diff --git a/NDTensors/src/lib/SparseArrayInterface/src/base.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/base.jl similarity index 91% rename from NDTensors/src/lib/SparseArrayInterface/src/base.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/base.jl index cc43a3a224..5a97c8b132 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/base.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/base.jl @@ -44,34 +44,35 @@ end # but we don't use that here since `sparse_fill!` # is used inside of `sparse_map!`. function sparse_fill!(a::AbstractArray, x) - if iszero(x) - empty_storage!(a) - end - fill!(storage(a), x) + ## TODO: Add this back? + ## if iszero(x) + ## # TODO: Check that `typeof(x)` is compatible + ## # with `eltype(a)`. + ## dropall!(a) + ## end + fill!(sparse_storage(a), x) return a end # This could just call `sparse_fill!` # but it avoids a zero construction and check.
function sparse_zero!(a::AbstractArray) - empty_storage!(a) - fill!(storage(a), zero(eltype(a))) + dropall!(a) + sparse_zerovector!(a) return a end -# TODO: Make `sparse_zero!`? function sparse_zero(a::AbstractArray) # Need to overload `similar` for custom types a = similar(a) - # TODO: Use custom zero value? - sparse_fill!(a, zero(eltype(a))) + sparse_zerovector!(a) return a end # TODO: Is this a good definition? function sparse_zero(arraytype::Type{<:AbstractArray}, dims::Tuple{Vararg{Int}}) a = arraytype(undef, dims) - sparse_fill!(a, zero(eltype(a))) + sparse_zerovector!(a) return a end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/broadcast.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/broadcast.jl similarity index 100% rename from NDTensors/src/lib/SparseArrayInterface/src/broadcast.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/broadcast.jl diff --git a/NDTensors/src/lib/SparseArrayInterface/src/conversion.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/conversion.jl similarity index 100% rename from NDTensors/src/lib/SparseArrayInterface/src/conversion.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/conversion.jl diff --git a/NDTensors/src/lib/SparseArrayInterface/src/copyto.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/copyto.jl similarity index 100% rename from NDTensors/src/lib/SparseArrayInterface/src/copyto.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/copyto.jl diff --git a/NDTensors/src/lib/SparseArrayInterface/src/densearray.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/densearray.jl similarity index 100% rename from NDTensors/src/lib/SparseArrayInterface/src/densearray.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/densearray.jl diff --git a/NDTensors/src/lib/SparseArrayInterface/src/indexing.jl 
b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/indexing.jl similarity index 95% rename from NDTensors/src/lib/SparseArrayInterface/src/indexing.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/indexing.jl index 0e94b39c38..d267fd42f2 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/indexing.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/indexing.jl @@ -18,7 +18,7 @@ end index(i::StoredIndex) = i.iouter StorageIndex(i::StoredIndex) = i.istorage -nstored(a::AbstractArray) = length(storage(a)) +nstored(a::AbstractArray) = length(sparse_storage(a)) struct NotStoredIndex{Iouter} <: MaybeStoredIndex{Iouter} iouter::Iouter @@ -32,7 +32,7 @@ MaybeStoredIndex(I, I_storage) = StoredIndex(I, StorageIndex(I_storage)) MaybeStoredIndex(I, I_storage::Nothing) = NotStoredIndex(I) function storage_indices(a::AbstractArray) - return eachindex(storage(a)) + return eachindex(sparse_storage(a)) end # Derived @@ -49,7 +49,7 @@ function sparse_getindex(a::AbstractArray, I::StoredIndex) end function sparse_getindex(a::AbstractArray, I::StorageIndex) - return storage(a)[index(I)] + return sparse_storage(a)[index(I)] end function sparse_getindex(a::AbstractArray{<:Any,N}, I::Vararg{Int,N}) where {N} @@ -92,7 +92,7 @@ end # Update a nonzero value function sparse_setindex!(a::AbstractArray, value, I::StorageIndex) - storage(a)[index(I)] = value + sparse_storage(a)[index(I)] = value return a end @@ -149,7 +149,7 @@ end indices(i::StorageIndices) = i.i function sparse_getindex(a::AbstractArray, I::StorageIndices{Colon}) - return storage(a) + return sparse_storage(a) end function sparse_getindex(a::AbstractArray, I::StorageIndices) @@ -157,7 +157,7 @@ function sparse_getindex(a::AbstractArray, I::StorageIndices) end function sparse_setindex!(a::AbstractArray, value, I::StorageIndices{Colon}) - storage(a) .= value + sparse_storage(a) .= value return a end diff --git 
a/NDTensors/src/lib/SparseArrayInterface/src/interface.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/interface.jl similarity index 94% rename from NDTensors/src/lib/SparseArrayInterface/src/interface.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/interface.jl index 68257965c8..19ce5a2e28 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/interface.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/interface.jl @@ -4,7 +4,7 @@ # Minimal interface # Data structure of the stored (generally nonzero) values # nonzeros(a::AbstractArray) = a -storage(a::AbstractArray) = a +sparse_storage(a::AbstractArray) = a # Minimal interface # Map an index into the stored data to a CartesianIndex of the diff --git a/NDTensors/src/lib/SparseArrayInterface/src/interface_optional.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/interface_optional.jl similarity index 77% rename from NDTensors/src/lib/SparseArrayInterface/src/interface_optional.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/interface_optional.jl index d1f8f0bb2e..0e67b0977f 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/interface_optional.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/interface_optional.jl @@ -1,7 +1,9 @@ # Optional interface. # Access a zero value. +getindex_zero_function(::AbstractArray) = Zero() + function getindex_notstored(a::AbstractArray, I) - return zero(eltype(a)) + return getindex_zero_function(a)(a, I) end # Optional interface. @@ -24,9 +26,12 @@ end # Array types should overload `Base.dataids` to opt-in # to aliasing detection with `Base.mightalias` # to avoid emptying an input array in the case of `sparse_map!`. -# `empty_storage!` is used to zero out the output array. +# `dropall!` is used to zero out the output array. # See also `Base.unalias` and `Base.unaliascopy`. 
-empty_storage!(a::AbstractArray) = a +# Interface is inspired by Base `SparseArrays.droptol!` +# and `SparseArrays.dropzeros!`, and is like +# `dropall!(a) = SparseArrays.droptol!(a, Inf)`. +dropall!(a::AbstractArray) = a # Overload function sparse_similar(a::AbstractArray, elt::Type, dims::Tuple{Vararg{Int}}) diff --git a/NDTensors/src/lib/SparseArrayInterface/src/map.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/map.jl similarity index 97% rename from NDTensors/src/lib/SparseArrayInterface/src/map.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/map.jl index 6b50167130..ef0b5918d0 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/map.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/map.jl @@ -60,7 +60,7 @@ end # TODO: Generalize to multiple arguements. # TODO: Define `sparse_mapreducedim!`. function sparse_mapreduce(f, op, a::AbstractArray; kwargs...) - output = mapreduce(f, op, storage(a); kwargs...) + output = mapreduce(f, op, sparse_storage(a); kwargs...) # TODO: Use more general `zero` value. # TODO: Better way to check that zeros don't affect the output? @assert op(output, f(zero(eltype(a)))) == output diff --git a/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/vectorinterface.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/vectorinterface.jl new file mode 100644 index 0000000000..787ec83019 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/vectorinterface.jl @@ -0,0 +1,28 @@ +using VectorInterface: zerovector! + +################################################## +# TODO: Move to `DictionariesVectorInterfaceExt`. +using VectorInterface: VectorInterface, zerovector!, zerovector!! 
+using Dictionaries: AbstractDictionary + +function VectorInterface.zerovector!(x::AbstractDictionary{<:Number}) + return fill!(x, zero(VectorInterface.scalartype(x))) +end +function VectorInterface.zerovector!(x::AbstractDictionary) + T = eltype(x) + for I in eachindex(x) + if isbitstype(T) || isassigned(x, I) + x[I] = zerovector!!(x[I]) + else + x[I] = zero(eltype(x)) + end + end + return x +end +################################################## + +function sparse_zerovector!(a::AbstractArray) + dropall!(a) + zerovector!(sparse_storage(a)) + return a +end diff --git a/NDTensors/src/lib/SparseArrayInterface/src/wrappers.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/wrappers.jl similarity index 83% rename from NDTensors/src/lib/SparseArrayInterface/src/wrappers.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/wrappers.jl index 199573fc4e..6dbf922c37 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/wrappers.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/wrappers.jl @@ -2,10 +2,13 @@ perm(::PermutedDimsArray{<:Any,<:Any,P}) where {P} = P genperm(v, perm) = map(j -> v[j], perm) genperm(v::CartesianIndex, perm) = CartesianIndex(map(j -> Tuple(v)[j], perm)) -storage(a::PermutedDimsArray) = storage(parent(a)) +# TODO: Should the keys get permuted?
+sparse_storage(a::PermutedDimsArray) = sparse_storage(parent(a)) + function storage_index_to_index(a::PermutedDimsArray, I) return genperm(storage_index_to_index(parent(a), I), perm(a)) end + function index_to_storage_index( a::PermutedDimsArray{<:Any,N}, I::CartesianIndex{N} ) where {N} diff --git a/NDTensors/src/lib/SparseArrayInterface/src/zero.jl b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/zero.jl similarity index 71% rename from NDTensors/src/lib/SparseArrayInterface/src/zero.jl rename to NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/zero.jl index 24757594a4..72899b4445 100644 --- a/NDTensors/src/lib/SparseArrayInterface/src/zero.jl +++ b/NDTensors/src/lib/SparseArrayInterface/src/sparsearrayinterface/zero.jl @@ -1,4 +1,5 @@ # Represents a zero value and an index # TODO: Rename `GetIndexZero`? struct Zero end +(f::Zero)(a::AbstractArray, I) = f(eltype(a), I) (::Zero)(type::Type, I) = zero(type) diff --git a/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/AbstractSparseArrays.jl b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/AbstractSparseArrays.jl new file mode 100644 index 0000000000..fa7bf6707c --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/AbstractSparseArrays.jl @@ -0,0 +1,50 @@ +module AbstractSparseArrays +using NDTensors.SparseArrayInterface: SparseArrayInterface, AbstractSparseArray + +struct SparseArray{T,N} <: AbstractSparseArray{T,N} + data::Vector{T} + dims::Tuple{Vararg{Int,N}} + index_to_dataindex::Dict{CartesianIndex{N},Int} + dataindex_to_index::Vector{CartesianIndex{N}} +end +function SparseArray{T,N}(dims::Tuple{Vararg{Int,N}}) where {T,N} + return SparseArray{T,N}( + T[], dims, Dict{CartesianIndex{N},Int}(), Vector{CartesianIndex{N}}() + ) +end +SparseArray{T,N}(dims::Vararg{Int,N}) where {T,N} = SparseArray{T,N}(dims) +SparseArray{T}(dims::Tuple{Vararg{Int}}) where {T} = 
SparseArray{T,length(dims)}(dims) +SparseArray{T}(dims::Vararg{Int}) where {T} = SparseArray{T}(dims) + +# AbstractArray interface +Base.size(a::SparseArray) = a.dims +function Base.similar(a::SparseArray, elt::Type, dims::Tuple{Vararg{Int}}) + return SparseArray{elt}(dims) +end + +# Minimal interface +SparseArrayInterface.sparse_storage(a::SparseArray) = a.data +function SparseArrayInterface.index_to_storage_index( + a::SparseArray{<:Any,N}, I::CartesianIndex{N} +) where {N} + return get(a.index_to_dataindex, I, nothing) +end +SparseArrayInterface.storage_index_to_index(a::SparseArray, I) = a.dataindex_to_index[I] +function SparseArrayInterface.setindex_notstored!( + a::SparseArray{<:Any,N}, value, I::CartesianIndex{N} +) where {N} + push!(a.data, value) + push!(a.dataindex_to_index, I) + a.index_to_dataindex[I] = length(a.data) + return a +end + +# Empty the storage, helps with efficiency in `map!` to drop +# zeros. +function SparseArrayInterface.dropall!(a::SparseArray) + empty!(a.data) + empty!(a.index_to_dataindex) + empty!(a.dataindex_to_index) + return a +end +end diff --git a/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/DiagonalArrays.jl b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/DiagonalArrays.jl index 9285498935..e3164fe675 100644 --- a/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/DiagonalArrays.jl +++ b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/DiagonalArrays.jl @@ -31,7 +31,7 @@ function Base.similar(a::DiagonalArray, elt::Type, dims::Tuple{Vararg{Int}}) end # Minimal interface -SparseArrayInterface.storage(a::DiagonalArray) = a.data +SparseArrayInterface.sparse_storage(a::DiagonalArray) = a.data function SparseArrayInterface.index_to_storage_index( a::DiagonalArray{<:Any,N}, I::CartesianIndex{N} ) where {N} diff --git a/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl 
b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl index 01d00f8dc0..9c1b7b6d54 100644 --- a/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl +++ b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl @@ -1,4 +1,5 @@ module SparseArrayInterfaceTestUtils -include("SparseArrays.jl") +include("AbstractSparseArrays.jl") include("DiagonalArrays.jl") +include("SparseArrays.jl") end diff --git a/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrays.jl b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrays.jl index 8a0c244f95..162d0e4a2c 100644 --- a/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrays.jl +++ b/NDTensors/src/lib/SparseArrayInterface/test/SparseArrayInterfaceTestUtils/SparseArrays.jl @@ -18,18 +18,19 @@ SparseArray{T}(dims::Vararg{Int}) where {T} = SparseArray{T}(dims) # AbstractArray interface Base.size(a::SparseArray) = a.dims +function Base.similar(a::SparseArray, elt::Type, dims::Tuple{Vararg{Int}}) + return SparseArray{elt}(dims) +end + function Base.getindex(a::SparseArray, I...) return SparseArrayInterface.sparse_getindex(a, I...) end function Base.setindex!(a::SparseArray, I...) return SparseArrayInterface.sparse_setindex!(a, I...) end -function Base.similar(a::SparseArray, elt::Type, dims::Tuple{Vararg{Int}}) - return SparseArray{elt}(dims) -end # Minimal interface -SparseArrayInterface.storage(a::SparseArray) = a.data +SparseArrayInterface.sparse_storage(a::SparseArray) = a.data function SparseArrayInterface.index_to_storage_index( a::SparseArray{<:Any,N}, I::CartesianIndex{N} ) where {N} @@ -47,10 +48,11 @@ end # Empty the storage, helps with efficiency in `map!` to drop # zeros. 
-function SparseArrayInterface.empty_storage!(a::SparseArray) +function SparseArrayInterface.dropall!(a::SparseArray) empty!(a.data) empty!(a.index_to_dataindex) - return empty!(a.dataindex_to_index) + empty!(a.dataindex_to_index) + return a end # Broadcasting diff --git a/NDTensors/src/lib/SparseArrayInterface/test/runtests.jl b/NDTensors/src/lib/SparseArrayInterface/test/runtests.jl index 2ae116cf59..de8fc18fc6 100644 --- a/NDTensors/src/lib/SparseArrayInterface/test/runtests.jl +++ b/NDTensors/src/lib/SparseArrayInterface/test/runtests.jl @@ -1,340 +1,5 @@ @eval module $(gensym()) -using Compat: Returns, allequal -using LinearAlgebra: norm -include("SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl") -using .SparseArrayInterfaceTestUtils.DiagonalArrays: DiagonalArray -using .SparseArrayInterfaceTestUtils.SparseArrays: SparseArray -using Test: @test, @testset, @test_broken, @test_throws - -@testset "SparseArrayInterface (eltype=$elt)" for elt in - (Float32, ComplexF32, Float64, ComplexF64) - @testset "Array" begin - using NDTensors.SparseArrayInterface: SparseArrayInterface - a = randn(2, 3) - @test SparseArrayInterface.storage(a) == a - @test SparseArrayInterface.index_to_storage_index(a, CartesianIndex(1, 2)) == - CartesianIndex(1, 2) - @test SparseArrayInterface.storage_index_to_index(a, CartesianIndex(1, 2)) == - CartesianIndex(1, 2) - end - @testset "Custom SparseArray" begin - a = SparseArray{elt}(2, 3) - @test size(a) == (2, 3) - @test axes(a) == (1:2, 1:3) - @test SparseArrayInterface.storage(a) == elt[] - @test iszero(SparseArrayInterface.nstored(a)) - @test collect(SparseArrayInterface.stored_indices(a)) == CartesianIndex{2}[] - @test iszero(a) - @test iszero(norm(a)) - for I in eachindex(a) - @test iszero(a) - end - - a = SparseArray{elt}(2, 3) - fill!(a, 0) - @test size(a) == (2, 3) - @test iszero(a) - @test iszero(SparseArrayInterface.nstored(a)) - - a_dense = SparseArrayInterface.densearray(a) - @test a_dense == a - @test a_dense isa 
Array{elt,ndims(a)} - - a = SparseArray{elt}(2, 3) - fill!(a, 2) - @test size(a) == (2, 3) - @test !iszero(a) - @test SparseArrayInterface.nstored(a) == length(a) - for I in eachindex(a) - @test a[I] == 2 - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - @test a[1, 2] == 12 - @test a[3] == 12 # linear indexing - @test size(a) == (2, 3) - @test axes(a) == (1:2, 1:3) - @test a[SparseArrayInterface.StorageIndex(1)] == 12 - @test SparseArrayInterface.storage(a) == elt[12] - @test isone(SparseArrayInterface.nstored(a)) - @test collect(SparseArrayInterface.stored_indices(a)) == [CartesianIndex(1, 2)] - @test !iszero(a) - @test !iszero(norm(a)) - for I in eachindex(a) - if I == CartesianIndex(1, 2) - @test a[I] == 12 - else - @test iszero(a[I]) - end - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - a = map(x -> 2x, a) - for I in eachindex(a) - if I == CartesianIndex(1, 2) - @test a[I] == 2 * 12 - else - @test iszero(a[I]) - end - end - - a = SparseArray{elt}(2, 2, 2) - a[1, 2, 2] = 122 - a_r = reshape(a, 2, 4) - @test a_r[1, 4] == a[1, 2, 2] == 122 - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - a = zero(a) - @test size(a) == (2, 3) - @test iszero(SparseArrayInterface.nstored(a)) - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = SparseArray{elt}(2, 3) - b[2, 1] = 21 - @test a == a - @test a ≠ b - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - @test isreal(a) - - a = SparseArray{elt}(2, 3) - a[1, 2] = randn(elt) - b = copy(a) - conj!(b) - for I in eachindex(a) - @test conj(a[I]) == b[I] - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = randn(elt) - b = conj(a) - for I in eachindex(a) - @test conj(a[I]) == b[I] - end - - if !(elt <: Real) - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 + 12im - @test !isreal(a) - end - - a = SparseArray{elt}(2, 2) - a[1, 2] = 12 - a = one(a) - @test size(a) == (2, 2) - @test isone(a[1, 1]) - @test isone(a[2, 2]) - @test iszero(a[1, 2]) - @test iszero(a[2, 1]) - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - a = zero(a) - @test size(a) 
== (2, 3) - @test iszero(SparseArrayInterface.nstored(a)) - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - a = copy(a) - @test size(a) == (2, 3) - @test axes(a) == (1:2, 1:3) - @test SparseArrayInterface.storage(a) == elt[12] - @test isone(SparseArrayInterface.nstored(a)) - @test SparseArrayInterface.storage_indices(a) == 1:1 - @test collect(SparseArrayInterface.stored_indices(a)) == [CartesianIndex(1, 2)] - @test !iszero(a) - @test !iszero(norm(a)) - for I in eachindex(a) - if I == CartesianIndex(1, 2) - @test a[I] == 12 - else - @test iszero(a[I]) - end - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - a = 2 * a - @test size(a) == (2, 3) - @test axes(a) == (1:2, 1:3) - @test SparseArrayInterface.storage(a) == elt[24] - @test isone(SparseArrayInterface.nstored(a)) - @test collect(SparseArrayInterface.stored_indices(a)) == [CartesianIndex(1, 2)] - @test !iszero(a) - @test !iszero(norm(a)) - for I in eachindex(a) - if I == CartesianIndex(1, 2) - @test a[I] == 24 - else - @test iszero(a[I]) - end - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = SparseArray{elt}(2, 3) - b[2, 1] = 21 - c = a + b - @test size(c) == (2, 3) - @test axes(c) == (1:2, 1:3) - @test SparseArrayInterface.storage(c) == elt[12, 21] - @test SparseArrayInterface.nstored(c) == 2 - @test collect(SparseArrayInterface.stored_indices(c)) == - [CartesianIndex(1, 2), CartesianIndex(2, 1)] - @test !iszero(c) - @test !iszero(norm(c)) - for I in eachindex(c) - if I == CartesianIndex(1, 2) - @test c[I] == 12 - elseif I == CartesianIndex(2, 1) - @test c[I] == 21 - else - @test iszero(c[I]) - end - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = permutedims(a, (2, 1)) - @test size(b) == (3, 2) - @test axes(b) == (1:3, 1:2) - @test SparseArrayInterface.storage(b) == elt[12] - @test SparseArrayInterface.nstored(b) == 1 - @test collect(SparseArrayInterface.stored_indices(b)) == [CartesianIndex(2, 1)] - @test !iszero(b) - @test !iszero(norm(b)) - for I in eachindex(b) - if I == CartesianIndex(2, 
1) - @test b[I] == 12 - else - @test iszero(b[I]) - end - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = randn(elt, 2, 3) - b .= a - @test a == b - for I in eachindex(a) - @test a[I] == b[I] - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = randn(elt, 2, 3) - b .= 2 .* a - @test 2 * a == b - for I in eachindex(a) - @test 2 * a[I] == b[I] - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = randn(elt, 2, 3) - b .= 2 .+ a - @test 2 .+ a == b - for I in eachindex(a) - @test 2 + a[I] == b[I] - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = randn(elt, 2, 3) - map!(x -> 2x, b, a) - @test 2 * a == b - for I in eachindex(a) - @test 2 * a[I] == b[I] - end - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = zeros(elt, 2, 3) - b[2, 1] = 21 - @test Array(a) == a - @test a + b == Array(a) + b - @test b + a == Array(a) + b - @test b .+ 2 .* a == 2 * Array(a) + b - @test a .+ 2 .* b == Array(a) + 2b - @test a + b isa Matrix{elt} - @test b + a isa Matrix{elt} - @test SparseArrayInterface.nstored(a + b) == length(a) - - a = SparseArray{elt}(2, 3) - a[1, 2] = 12 - b = zeros(elt, 2, 3) - b[2, 1] = 21 - a′ = copy(a) - a′ .+= b - @test a′ == a + b - @test SparseArrayInterface.nstored(a′) == 2 - end - @testset "Custom DiagonalArray" begin - # TODO: Test `fill!`. 
- - # Test - a = DiagonalArray{elt}(undef, 2, 3) - @test size(a) == (2, 3) - a[1, 1] = 11 - a[2, 2] = 22 - @test a[1, 1] == 11 - @test a[2, 2] == 22 - @test_throws ArgumentError a[1, 2] = 12 - @test SparseArrayInterface.storage_indices(a) == 1:2 - @test collect(SparseArrayInterface.stored_indices(a)) == - [CartesianIndex(1, 1), CartesianIndex(2, 2)] - a[1, 2] = 0 - @test a[1, 1] == 11 - @test a[2, 2] == 22 - - a_dense = SparseArrayInterface.densearray(a) - @test a_dense == a - @test a_dense isa Array{elt,ndims(a)} - - b = similar(a) - @test b isa DiagonalArray - @test size(b) == (2, 3) - - a = DiagonalArray(elt[1, 2, 3], (3, 3)) - @test size(a) == (3, 3) - @test a[1, 1] == 1 - @test a[2, 2] == 2 - @test a[3, 3] == 3 - @test a[SparseArrayInterface.StorageIndex(1)] == 1 - @test a[SparseArrayInterface.StorageIndex(2)] == 2 - @test a[SparseArrayInterface.StorageIndex(3)] == 3 - @test iszero(a[1, 2]) - - a = DiagonalArray(elt[1, 2, 3], (3, 3)) - a = 2 * a - @test size(a) == (3, 3) - @test a[1, 1] == 2 - @test a[2, 2] == 4 - @test a[3, 3] == 6 - @test iszero(a[1, 2]) - - a = DiagonalArray(elt[1, 2, 3], (3, 3)) - a_r = reshape(a, 9) - @test a_r isa DiagonalArray{elt,1} - for I in LinearIndices(a) - @test a[I] == a_r[I] - end - - # This needs `Base.reshape` with a custom destination - # calling `SparseArrayInterface.sparse_reshape!` - # in order to specify an appropriate output - # type to work. 
- a = DiagonalArray(elt[1, 2], (2, 2, 2)) - a_r = reshape(a, 2, 4) - @test a_r isa Matrix{elt} - for I in LinearIndices(a) - @test a[I] == a_r[I] - end - end +for filename in ["abstractsparsearray", "array", "diagonalarray"] + include("test_$filename.jl") end end diff --git a/NDTensors/src/lib/SparseArrayInterface/test/test_abstractsparsearray.jl b/NDTensors/src/lib/SparseArrayInterface/test/test_abstractsparsearray.jl new file mode 100644 index 0000000000..1f7a18295c --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/test/test_abstractsparsearray.jl @@ -0,0 +1,269 @@ +@eval module $(gensym()) +using LinearAlgebra: norm +using NDTensors.SparseArrayInterface: SparseArrayInterface +include("SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl") +using .SparseArrayInterfaceTestUtils.AbstractSparseArrays: AbstractSparseArrays +using .SparseArrayInterfaceTestUtils.SparseArrays: SparseArrays +using Test: @test, @testset +@testset "AbstractSparseArray (arraytype=$SparseArray, eltype=$elt)" for SparseArray in ( + AbstractSparseArrays.SparseArray, SparseArrays.SparseArray + ), + elt in (Float32, ComplexF32, Float64, ComplexF64) + + a = SparseArray{elt}(2, 3) + @test size(a) == (2, 3) + @test axes(a) == (1:2, 1:3) + @test SparseArrayInterface.sparse_storage(a) == elt[] + @test iszero(SparseArrayInterface.nstored(a)) + @test collect(SparseArrayInterface.stored_indices(a)) == CartesianIndex{2}[] + @test iszero(a) + @test iszero(norm(a)) + for I in eachindex(a) + @test iszero(a) + end + + a = SparseArray{elt}(2, 3) + fill!(a, 0) + @test size(a) == (2, 3) + @test iszero(a) + @test iszero(SparseArrayInterface.nstored(a)) + + a_dense = SparseArrayInterface.densearray(a) + @test a_dense == a + @test a_dense isa Array{elt,ndims(a)} + + a = SparseArray{elt}(2, 3) + fill!(a, 2) + @test size(a) == (2, 3) + @test !iszero(a) + @test SparseArrayInterface.nstored(a) == length(a) + for I in eachindex(a) + @test a[I] == 2 + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 
+ @test a[1, 2] == 12 + @test a[3] == 12 # linear indexing + @test size(a) == (2, 3) + @test axes(a) == (1:2, 1:3) + @test a[SparseArrayInterface.StorageIndex(1)] == 12 + @test SparseArrayInterface.sparse_storage(a) == elt[12] + @test isone(SparseArrayInterface.nstored(a)) + @test collect(SparseArrayInterface.stored_indices(a)) == [CartesianIndex(1, 2)] + @test !iszero(a) + @test !iszero(norm(a)) + for I in eachindex(a) + if I == CartesianIndex(1, 2) + @test a[I] == 12 + else + @test iszero(a[I]) + end + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + a = map(x -> 2x, a) + for I in eachindex(a) + if I == CartesianIndex(1, 2) + @test a[I] == 2 * 12 + else + @test iszero(a[I]) + end + end + + a = SparseArray{elt}(2, 2, 2) + a[1, 2, 2] = 122 + a_r = reshape(a, 2, 4) + @test a_r[1, 4] == a[1, 2, 2] == 122 + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + a = zero(a) + @test size(a) == (2, 3) + @test iszero(SparseArrayInterface.nstored(a)) + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = SparseArray{elt}(2, 3) + b[2, 1] = 21 + @test a == a + @test a ≠ b + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + @test isreal(a) + + a = SparseArray{elt}(2, 3) + a[1, 2] = randn(elt) + b = copy(a) + conj!(b) + for I in eachindex(a) + @test conj(a[I]) == b[I] + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = randn(elt) + b = conj(a) + for I in eachindex(a) + @test conj(a[I]) == b[I] + end + + if !(elt <: Real) + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + 12im + @test !isreal(a) + end + + a = SparseArray{elt}(2, 2) + a[1, 2] = 12 + a = one(a) + @test size(a) == (2, 2) + @test isone(a[1, 1]) + @test isone(a[2, 2]) + @test iszero(a[1, 2]) + @test iszero(a[2, 1]) + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + a = zero(a) + @test size(a) == (2, 3) + @test iszero(SparseArrayInterface.nstored(a)) + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + a = copy(a) + @test size(a) == (2, 3) + @test axes(a) == (1:2, 1:3) + @test SparseArrayInterface.sparse_storage(a) == elt[12] + @test 
isone(SparseArrayInterface.nstored(a)) + @test SparseArrayInterface.storage_indices(a) == 1:1 + @test collect(SparseArrayInterface.stored_indices(a)) == [CartesianIndex(1, 2)] + @test !iszero(a) + @test !iszero(norm(a)) + for I in eachindex(a) + if I == CartesianIndex(1, 2) + @test a[I] == 12 + else + @test iszero(a[I]) + end + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + a = 2 * a + @test size(a) == (2, 3) + @test axes(a) == (1:2, 1:3) + @test SparseArrayInterface.sparse_storage(a) == elt[24] + @test isone(SparseArrayInterface.nstored(a)) + @test collect(SparseArrayInterface.stored_indices(a)) == [CartesianIndex(1, 2)] + @test !iszero(a) + @test !iszero(norm(a)) + for I in eachindex(a) + if I == CartesianIndex(1, 2) + @test a[I] == 24 + else + @test iszero(a[I]) + end + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = SparseArray{elt}(2, 3) + b[2, 1] = 21 + c = a + b + @test size(c) == (2, 3) + @test axes(c) == (1:2, 1:3) + @test SparseArrayInterface.sparse_storage(c) == elt[12, 21] + @test SparseArrayInterface.nstored(c) == 2 + @test collect(SparseArrayInterface.stored_indices(c)) == + [CartesianIndex(1, 2), CartesianIndex(2, 1)] + @test !iszero(c) + @test !iszero(norm(c)) + for I in eachindex(c) + if I == CartesianIndex(1, 2) + @test c[I] == 12 + elseif I == CartesianIndex(2, 1) + @test c[I] == 21 + else + @test iszero(c[I]) + end + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = permutedims(a, (2, 1)) + @test size(b) == (3, 2) + @test axes(b) == (1:3, 1:2) + @test SparseArrayInterface.sparse_storage(b) == elt[12] + @test SparseArrayInterface.nstored(b) == 1 + @test collect(SparseArrayInterface.stored_indices(b)) == [CartesianIndex(2, 1)] + @test !iszero(b) + @test !iszero(norm(b)) + for I in eachindex(b) + if I == CartesianIndex(2, 1) + @test b[I] == 12 + else + @test iszero(b[I]) + end + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = randn(elt, 2, 3) + b .= a + @test a == b + for I in eachindex(a) + @test a[I] == b[I] + end + + a 
= SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = randn(elt, 2, 3) + b .= 2 .* a + @test 2 * a == b + for I in eachindex(a) + @test 2 * a[I] == b[I] + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = randn(elt, 2, 3) + b .= 2 .+ a + @test 2 .+ a == b + for I in eachindex(a) + @test 2 + a[I] == b[I] + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = randn(elt, 2, 3) + map!(x -> 2x, b, a) + @test 2 * a == b + for I in eachindex(a) + @test 2 * a[I] == b[I] + end + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = zeros(elt, 2, 3) + b[2, 1] = 21 + @test Array(a) == a + @test a + b == Array(a) + b + @test b + a == Array(a) + b + @test b .+ 2 .* a == 2 * Array(a) + b + @test a .+ 2 .* b == Array(a) + 2b + @test a + b isa Matrix{elt} + @test b + a isa Matrix{elt} + @test SparseArrayInterface.nstored(a + b) == length(a) + + a = SparseArray{elt}(2, 3) + a[1, 2] = 12 + b = zeros(elt, 2, 3) + b[2, 1] = 21 + a′ = copy(a) + a′ .+= b + @test a′ == a + b + @test SparseArrayInterface.nstored(a′) == 2 +end +end diff --git a/NDTensors/src/lib/SparseArrayInterface/test/test_array.jl b/NDTensors/src/lib/SparseArrayInterface/test/test_array.jl new file mode 100644 index 0000000000..9cc0381771 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/test/test_array.jl @@ -0,0 +1,13 @@ +@eval module $(gensym()) +using NDTensors.SparseArrayInterface: SparseArrayInterface +include("SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl") +using Test: @test, @testset +@testset "Array (eltype=$elt)" for elt in (Float32, ComplexF32, Float64, ComplexF64) + a = randn(2, 3) + @test SparseArrayInterface.sparse_storage(a) == a + @test SparseArrayInterface.index_to_storage_index(a, CartesianIndex(1, 2)) == + CartesianIndex(1, 2) + @test SparseArrayInterface.storage_index_to_index(a, CartesianIndex(1, 2)) == + CartesianIndex(1, 2) +end +end diff --git a/NDTensors/src/lib/SparseArrayInterface/test/test_diagonalarray.jl 
b/NDTensors/src/lib/SparseArrayInterface/test/test_diagonalarray.jl new file mode 100644 index 0000000000..6804647452 --- /dev/null +++ b/NDTensors/src/lib/SparseArrayInterface/test/test_diagonalarray.jl @@ -0,0 +1,69 @@ +@eval module $(gensym()) +using LinearAlgebra: norm +using NDTensors.SparseArrayInterface: SparseArrayInterface +include("SparseArrayInterfaceTestUtils/SparseArrayInterfaceTestUtils.jl") +using .SparseArrayInterfaceTestUtils.DiagonalArrays: DiagonalArray +using Test: @test, @testset, @test_throws +@testset "DiagonalArray (eltype=$elt)" for elt in (Float32, ComplexF32, Float64, ComplexF64) + # TODO: Test `fill!`. + + # Test + a = DiagonalArray{elt}(undef, 2, 3) + @test size(a) == (2, 3) + a[1, 1] = 11 + a[2, 2] = 22 + @test a[1, 1] == 11 + @test a[2, 2] == 22 + @test_throws ArgumentError a[1, 2] = 12 + @test SparseArrayInterface.storage_indices(a) == 1:2 + @test collect(SparseArrayInterface.stored_indices(a)) == + [CartesianIndex(1, 1), CartesianIndex(2, 2)] + a[1, 2] = 0 + @test a[1, 1] == 11 + @test a[2, 2] == 22 + + a_dense = SparseArrayInterface.densearray(a) + @test a_dense == a + @test a_dense isa Array{elt,ndims(a)} + + b = similar(a) + @test b isa DiagonalArray + @test size(b) == (2, 3) + + a = DiagonalArray(elt[1, 2, 3], (3, 3)) + @test size(a) == (3, 3) + @test a[1, 1] == 1 + @test a[2, 2] == 2 + @test a[3, 3] == 3 + @test a[SparseArrayInterface.StorageIndex(1)] == 1 + @test a[SparseArrayInterface.StorageIndex(2)] == 2 + @test a[SparseArrayInterface.StorageIndex(3)] == 3 + @test iszero(a[1, 2]) + + a = DiagonalArray(elt[1, 2, 3], (3, 3)) + a = 2 * a + @test size(a) == (3, 3) + @test a[1, 1] == 2 + @test a[2, 2] == 4 + @test a[3, 3] == 6 + @test iszero(a[1, 2]) + + a = DiagonalArray(elt[1, 2, 3], (3, 3)) + a_r = reshape(a, 9) + @test a_r isa DiagonalArray{elt,1} + for I in LinearIndices(a) + @test a[I] == a_r[I] + end + + # This needs `Base.reshape` with a custom destination + # calling `SparseArrayInterface.sparse_reshape!` + # in order 
to specify an appropriate output + # type to work. + a = DiagonalArray(elt[1, 2], (2, 2, 2)) + a_r = reshape(a, 2, 4) + @test a_r isa Matrix{elt} + for I in LinearIndices(a) + @test a[I] == a_r[I] + end +end +end diff --git a/NDTensors/test/arraytensor/Project.toml b/NDTensors/test/backup/arraytensor/Project.toml similarity index 100% rename from NDTensors/test/arraytensor/Project.toml rename to NDTensors/test/backup/arraytensor/Project.toml diff --git a/NDTensors/test/arraytensor/array.jl b/NDTensors/test/backup/arraytensor/array.jl similarity index 100% rename from NDTensors/test/arraytensor/array.jl rename to NDTensors/test/backup/arraytensor/array.jl diff --git a/NDTensors/test/arraytensor/blocksparsearray.jl b/NDTensors/test/backup/arraytensor/blocksparsearray.jl similarity index 100% rename from NDTensors/test/arraytensor/blocksparsearray.jl rename to NDTensors/test/backup/arraytensor/blocksparsearray.jl diff --git a/NDTensors/test/arraytensor/diagonalarray.jl b/NDTensors/test/backup/arraytensor/diagonalarray.jl similarity index 100% rename from NDTensors/test/arraytensor/diagonalarray.jl rename to NDTensors/test/backup/arraytensor/diagonalarray.jl diff --git a/NDTensors/test/arraytensor/runtests.jl b/NDTensors/test/backup/arraytensor/runtests.jl similarity index 100% rename from NDTensors/test/arraytensor/runtests.jl rename to NDTensors/test/backup/arraytensor/runtests.jl diff --git a/NDTensors/test/runtests.jl b/NDTensors/test/runtests.jl index 872ec7f5b1..6d81096d07 100644 --- a/NDTensors/test/runtests.jl +++ b/NDTensors/test/runtests.jl @@ -6,7 +6,7 @@ using SafeTestsets: @safetestset filenames = filter(readdir(@__DIR__)) do f startswith("test_")(f) && endswith(".jl")(f) end - for dir in ["lib", "arraytensor", "ITensors"] + for dir in ["lib", "ITensors"] push!(filenames, joinpath(dir, "runtests.jl")) end @testset "Test $(@__DIR__)/$filename" for filename in filenames diff --git a/src/ITensorsNamedDimsArraysExt/src/to_nameddimsarray.jl 
b/src/ITensorsNamedDimsArraysExt/src/to_nameddimsarray.jl index a17c8e7599..bb943bd0b6 100644 --- a/src/ITensorsNamedDimsArraysExt/src/to_nameddimsarray.jl +++ b/src/ITensorsNamedDimsArraysExt/src/to_nameddimsarray.jl @@ -49,11 +49,12 @@ function to_nameddimsarray(x::BlockSparseTensor) return named(arraystorage, name.(inds(x))) end -using ..NDTensors: CombinerTensor, CombinerArray, storage -# TODO: Delete when we directly use `CombinerArray` as storage. -function to_nameddimsarray(t::CombinerTensor) - return named(CombinerArray(storage(t), to_axes(inds(t))), name.(inds(t))) -end +## TODO: Add this back, define `CombinerArrays` library in NDTensors! +## using ..NDTensors: CombinerTensor, CombinerArray, storage +## # TODO: Delete when we directly use `CombinerArray` as storage. +## function to_nameddimsarray(t::CombinerTensor) +## return named(CombinerArray(storage(t), to_axes(inds(t))), name.(inds(t))) +## end to_nameddimsarray(t::ITensor) = ITensor(to_nameddimsarray(t.tensor)) diff --git a/src/backup/to_arraystorage.jl b/src/backup/to_arraystorage.jl new file mode 100644 index 0000000000..065684ae0f --- /dev/null +++ b/src/backup/to_arraystorage.jl @@ -0,0 +1,9 @@ +# TODO: Delete once `TensorStorage` is removed. +function NDTensors.to_arraystorage(x::ITensor) + return itensor(NDTensors.to_arraystorage(tensor(x))) +end + +# TODO: Delete once `TensorStorage` is removed. +function NDTensors.to_arraystorage(x::AbstractMPS) + return NDTensors.to_arraystorage.(x) +end diff --git a/src/itensor.jl b/src/itensor.jl index 8be083b4fe..4dd3cc518d 100644 --- a/src/itensor.jl +++ b/src/itensor.jl @@ -126,11 +126,6 @@ ITensor(is, st::TensorStorage)::ITensor = ITensor(NeverAlias(), st, is) itensor(T::ITensor) = T ITensor(T::ITensor) = copy(T) -# TODO: Delete once `TensorStorage` is removed. -function NDTensors.to_arraystorage(x::ITensor) - return itensor(NDTensors.to_arraystorage(tensor(x))) -end - """ itensor(args...; kwargs...) 
diff --git a/src/mps/abstractmps.jl b/src/mps/abstractmps.jl index 7a243d0b36..43634416a9 100644 --- a/src/mps/abstractmps.jl +++ b/src/mps/abstractmps.jl @@ -16,11 +16,6 @@ size(m::AbstractMPS) = size(data(m)) ndims(m::AbstractMPS) = ndims(data(m)) -# TODO: Delete once `TensorStorage` is removed. -function NDTensors.to_arraystorage(x::AbstractMPS) - return NDTensors.to_arraystorage.(x) -end - function promote_itensor_eltype(m::Vector{ITensor}) T = isassigned(m, 1) ? eltype(m[1]) : Number for n in 2:length(m) diff --git a/src/mps/dmrg.jl b/src/mps/dmrg.jl index 49444e606e..88e150c271 100644 --- a/src/mps/dmrg.jl +++ b/src/mps/dmrg.jl @@ -10,7 +10,7 @@ function permute( for n in 1:length(M) lₙ₋₁ = linkind(M, n - 1) lₙ = linkind(M, n) - s⃗ₙ = NDTensors.TupleTools.sort(Tuple(siteinds(M, n)); by=plev) + s⃗ₙ = TupleTools.sort(Tuple(siteinds(M, n)); by=plev) M̃[n] = permute(M[n], filter(!isnothing, (lₙ₋₁, s⃗ₙ..., lₙ))) end set_ortho_lims!(M̃, ortho_lims(M)) diff --git a/test/ITensorLegacyMPS/base/test_arraystorage.jl b/test/ITensorLegacyMPS/base/backup/test_arraystorage.jl similarity index 100% rename from test/ITensorLegacyMPS/base/test_arraystorage.jl rename to test/ITensorLegacyMPS/base/backup/test_arraystorage.jl diff --git a/test/base/test_arraystorage.jl b/test/base/backup/test_arraystorage.jl similarity index 100% rename from test/base/test_arraystorage.jl rename to test/base/backup/test_arraystorage.jl