diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 50d572e..a6fcbd2 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -22,7 +22,7 @@ jobs:
       matrix:
         version:
           - '1.8' # lowest TensorOperations version
-          - '1.10' # LTS version
+          - 'lts' # LTS version
           - '1' # automatically expands to the latest stable 1.x release of Julia
         os:
           - ubuntu-latest
@@ -32,7 +32,7 @@ jobs:
           - x64
     steps:
       - uses: actions/checkout@v4
-      - uses: julia-actions/setup-julia@v1
+      - uses: julia-actions/setup-julia@v2
        with:
          version: ${{ matrix.version }}
          arch: ${{ matrix.arch }}
diff --git a/Project.toml b/Project.toml
index 6981aed..1ced779 100644
--- a/Project.toml
+++ b/Project.toml
@@ -12,14 +12,19 @@ VectorInterface = "409d34a3-91d5-4945-b6ec-7529ddf182d8"
 
 [weakdeps]
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
-TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"
 
 [extensions]
 SparseArrayKitSparseArrays = "SparseArrays"
-SparseArrayKitTensorOperations = "TensorOperations"
 
 [compat]
+Aqua = "0.8"
+LinearAlgebra = "1"
 PackageExtensionCompat = "1"
+Random = "1"
+SparseArrays = "1"
+Strided = "2"
+Test = "1"
+TestExtras = "0.3"
 TensorOperations = "5"
 TupleTools = "1.1"
 VectorInterface = "0.4.1"
@@ -31,9 +36,8 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 Strided = "5e0ebb24-38b0-5f93-81fe-25c709ecae67"
-TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 TestExtras = "5ed8adda-3752-4e41-b88a-e8b09835ee3a"
 
 [targets]
-test = ["Test", "TestExtras", "TensorOperations", "Strided", "Random", "LinearAlgebra", "SparseArrays", "Aqua"]
+test = ["Test", "TestExtras", "Strided", "Random", "LinearAlgebra", "SparseArrays", "Aqua"]
diff --git a/ext/SparseArrayKitTensorOperations.jl b/ext/SparseArrayKitTensorOperations.jl
deleted file mode 100644
index 0947ebf..0000000
--- a/ext/SparseArrayKitTensorOperations.jl
+++ /dev/null
@@ -1,61 +0,0 @@
-module SparseArrayKitTensorOperations
-
-import TensorOperations as TO
-using TensorOperations: Index2Tuple, linearize, numind
-using SparseArrayKit: tensoradd!, tensortrace!, tensorcontract!, SparseArray
-
-struct SparseArrayBackend <: TO.AbstractBackend
-end
-
-# ------------------------------------------------------------------------------------------
-# Default backend selection mechanism for AbstractArray instances
-# ------------------------------------------------------------------------------------------
-function TO.select_backend(::typeof(TO.tensoradd!), C::SparseArray, A::SparseArray)
-    return SparseArrayBackend()
-end
-
-function TO.select_backend(::typeof(TO.tensortrace!), C::SparseArray, A::SparseArray)
-    return SparseArrayBackend()
-end
-
-function TO.select_backend(::typeof(TO.tensorcontract!), C::SparseArray, A::SparseArray,
-                           B::SparseArray)
-    return SparseArrayBackend()
-end
-
-function TO.tensoradd!(C::SparseArray,
-                       A::SparseArray, pA::Index2Tuple, conjA::Bool,
-                       α::Number, β::Number,
-                       ::SparseArrayBackend, allocator)
-    return tensoradd!(C, A, conjA, linearize(pA), α, β)
-end
-
-function TO.tensortrace!(C::SparseArray,
-                         A::SparseArray, p::Index2Tuple, q::Index2Tuple, conjA::Bool,
-                         α::Number, β::Number,
-                         ::SparseArrayBackend, allocator)
-    return tensortrace!(C, A, conjA, linearize(p), q[1], q[2], α, β)
-end
-
-function TO.tensorcontract!(C::SparseArray,
-                            A::SparseArray, pA::Index2Tuple, conjA::Bool,
-                            B::SparseArray, pB::Index2Tuple, conjB::Bool,
-                            pAB::Index2Tuple,
-                            α::Number, β::Number,
-                            ::SparseArrayBackend, allocator)
-    return tensorcontract!(C, A, conjA, pA[1], pA[2], B, conjB, pB[2], pB[1],
-                           linearize(pAB), α, β)
-end
-
-function TO.tensoradd_type(TC, ::SparseArray, pA::Index2Tuple, ::Bool)
-    return SparseArray{TC,numind(pA)}
-end
-
-function TO.tensorcontract_type(TC,
-                                ::SparseArray, pA::Index2Tuple, conjA::Bool,
-                                ::SparseArray, pB::Index2Tuple, conjB::Bool,
-                                pAB::Index2Tuple)
-    return SparseArray{TC,numind(pAB)}
-end
-
-end
diff --git a/src/SparseArrayKit.jl b/src/SparseArrayKit.jl
index 8b29ea5..fc45fa5 100644
--- a/src/SparseArrayKit.jl
+++ b/src/SparseArrayKit.jl
@@ -1,11 +1,10 @@
 module SparseArrayKit
 
 using VectorInterface
+using TensorOperations
 using LinearAlgebra
 using TupleTools
 
-const IndexTuple{N} = NTuple{N,Int}
-
 export SparseArray
 export nonzero_pairs, nonzero_keys, nonzero_values, nonzero_length
 
diff --git a/src/base.jl b/src/base.jl
index 7b40e4f..4b6fc28 100644
--- a/src/base.jl
+++ b/src/base.jl
@@ -54,7 +54,7 @@ end
 
 # array manipulation
 function Base.permutedims!(dst::SparseArray, src::SparseArray, p)
-    return tensoradd!(dst, src, false, tuple(p...), true, false)
+    return tensoradd!(dst, src, (tuple(p...), ()), false)
 end
 
 function Base.reshape(parent::SparseArray{T}, dims::Dims) where {T}
diff --git a/src/linearalgebra.jl b/src/linearalgebra.jl
index 8c08783..7c7b491 100644
--- a/src/linearalgebra.jl
+++ b/src/linearalgebra.jl
@@ -87,9 +87,12 @@ function LinearAlgebra.mul!(C::SM, A::ASM, B::ASM, α::Number, β::Number)
     AA = A isa Union{Adjoint,Transpose} ? parent(A) : A
     BB = B isa Union{Adjoint,Transpose} ? parent(B) : B
 
-    return tensorcontract!(C, AA, conjA, oindA, cindA, BB, conjB, oindB, cindB, (1, 2), α,
+    return tensorcontract!(C, AA, (oindA, cindA), conjA, BB, (cindB, oindB), conjB,
+                           ((1, 2), ()), α,
                            β)
 end
 
-LinearAlgebra.adjoint!(C::SM, A::SM) = tensoradd!(C, A, true, (2, 1), One(), Zero())
-LinearAlgebra.transpose!(C::SM, A::SM) = tensoradd!(C, A, false, (2, 1), One(), Zero())
+LinearAlgebra.adjoint!(C::SM, A::SM) = tensoradd!(C, A, ((2, 1), ()), true, One(), Zero())
+function LinearAlgebra.transpose!(C::SM, A::SM)
+    return tensoradd!(C, A, ((2, 1), ()), false, One(), Zero())
+end
diff --git a/src/tensoroperations.jl b/src/tensoroperations.jl
index af16aa7..be91e08 100644
--- a/src/tensoroperations.jl
+++ b/src/tensoroperations.jl
@@ -1,99 +1,85 @@
+const TO = TensorOperations
+
+struct SparseArrayBackend <: TO.AbstractBackend end
+
 # TensorOperations compatiblity
 #-------------------------------
-function tensoradd!(C::SparseArray{<:Any,N},
-                    A::SparseArray{<:Any,N}, conjA::Bool, pA,
-                    α::Number=One(), β::Number=One()) where {N}
-    (N == length(pA) && TupleTools.isperm(pA)) ||
-        throw(ArgumentError("Invalid permutation of length $N: $pA"))
-    size(C) == TupleTools.getindices(size(A), pA) ||
-        throw(DimensionMismatch("non-matching sizes while adding arrays"))
+function TO.select_backend(::typeof(TO.tensoradd!), C::SparseArray, A::SparseArray)
+    return SparseArrayBackend()
+end
+
+function TO.tensoradd!(C::SparseArray,
+                       A::SparseArray, pA::Index2Tuple, conjA::Bool,
+                       α::Number, β::Number, ::SparseArrayBackend, allocator)
+    TO.argcheck_tensoradd(C, A, pA)
+    TO.dimcheck_tensoradd(C, A, pA)
 
     scale!(C, β)
+    pA_lin = linearize(pA)
     for (IA, vA) in A.data
-        IC = CartesianIndex(TupleTools.getindices(IA.I, pA))
+        IC = CartesianIndex(TupleTools.getindices(IA.I, pA_lin))
         C[IC] += α * (conjA ? conj(vA) : vA)
     end
+    return C
 end
 
-function tensortrace!(C::SparseArray{<:Any,NC},
-                      A::SparseArray{<:Any,NA}, conjA::Bool, p, q1, q2,
-                      α::Number=One(), β::Number=Zero()) where {NA,NC}
-    NC == length(p) ||
-        throw(ArgumentError("Invalid selection of $NC out of $NA: $p"))
-    NA - NC == 2 * length(q1) == 2 * length(q2) ||
-        throw(ArgumentError("invalid number of trace dimension"))
-    pA = (p..., q1..., q2...)
-    TupleTools.isperm(pA) ||
-        throw(ArgumentError("invalid permutation of length $(ndims(A)): $pA"))
-
-    sizeA = size(A)
-    sizeC = size(C)
-
-    TupleTools.getindices(sizeA, q1) == TupleTools.getindices(sizeA, q2) ||
-        throw(DimensionMismatch("non-matching trace sizes"))
-    sizeC == TupleTools.getindices(sizeA, p) ||
-        throw(DimensionMismatch("non-matching sizes"))
+function TO.select_backend(::typeof(TO.tensortrace!), C::SparseArray, A::SparseArray)
+    return SparseArrayBackend()
+end
+
+function TO.tensortrace!(C::SparseArray,
+                         A::SparseArray, p::Index2Tuple, q::Index2Tuple, conjA::Bool,
+                         α::Number, β::Number, ::SparseArrayBackend, allocator)
+    TO.argcheck_tensortrace(C, A, p, q)
+    TO.dimcheck_tensortrace(C, A, p, q)
 
     scale!(C, β)
+    p_lin = linearize(p)
     for (IA, v) in A.data
-        IAc1 = CartesianIndex(TupleTools.getindices(IA.I, q1))
-        IAc2 = CartesianIndex(TupleTools.getindices(IA.I, q2))
+        IAc1 = CartesianIndex(TupleTools.getindices(IA.I, q[1]))
+        IAc2 = CartesianIndex(TupleTools.getindices(IA.I, q[2]))
         IAc1 == IAc2 || continue
 
-        IC = CartesianIndex(TupleTools.getindices(IA.I, p))
+        IC = CartesianIndex(TupleTools.getindices(IA.I, p_lin))
         C[IC] += α * (conjA ? conj(v) : v)
     end
+    return C
 end
 
-function tensorcontract!(C::SparseArray,
-                         A::SparseArray, conjA::Bool, oindA, cindA,
-                         B::SparseArray, conjB::Bool, oindB, cindB,
-                         indCinoAB,
-                         α::Number=One(), β::Number=Zero())
-    pA = (oindA..., cindA...)
-    (length(pA) == ndims(A) && TupleTools.isperm(pA)) ||
-        throw(ArgumentError("invalid permutation of length $(ndims(A)): $pA"))
-    pB = (oindB..., cindB...)
-    (length(pB) == ndims(B) && TupleTools.isperm(pB)) ||
-        throw(ArgumentError("invalid permutation of length $(ndims(B)): $pB"))
-    (length(oindA) + length(oindB) == ndims(C)) ||
-        throw(ArgumentError("non-matching output indices in contraction"))
-    (ndims(C) == length(indCinoAB) && isperm(indCinoAB)) ||
-        throw(ArgumentError("invalid permutation of length $(ndims(C)): $indCinoAB"))
-
-    sizeA = size(A)
-    sizeB = size(B)
-    sizeC = size(C)
-
-    csizeA = TupleTools.getindices(sizeA, cindA)
-    csizeB = TupleTools.getindices(sizeB, cindB)
-    osizeA = TupleTools.getindices(sizeA, oindA)
-    osizeB = TupleTools.getindices(sizeB, oindB)
-
-    csizeA == csizeB ||
-        throw(DimensionMismatch("non-matching sizes in contracted dimensions"))
-    TupleTools.getindices((osizeA..., osizeB...), indCinoAB) == size(C) ||
-        throw(DimensionMismatch("non-matching sizes in uncontracted dimensions"))
+function TO.select_backend(::typeof(TO.tensorcontract!),
+                           C::SparseArray, A::SparseArray, B::SparseArray)
+    return SparseArrayBackend()
+end
+
+function TO.tensorcontract!(C::SparseArray,
+                            A::SparseArray, pA::Index2Tuple, conjA::Bool,
+                            B::SparseArray, pB::Index2Tuple, conjB::Bool,
+                            pAB::Index2Tuple,
+                            α::Number, β::Number, ::SparseArrayBackend, allocator)
+    TO.argcheck_tensorcontract(C, A, pA, B, pB, pAB)
+    TO.dimcheck_tensorcontract(C, A, pA, B, pB, pAB)
 
     scale!(C, β)
+    pAB_lin = linearize(pAB)
 
     keysA = sort!(collect(nonzero_keys(A));
-                  by=IA -> CartesianIndex(TupleTools.getindices(IA.I, cindA)))
+                  by=IA -> CartesianIndex(TupleTools.getindices(IA.I, pA[2])))
     keysB = sort!(collect(nonzero_keys(B));
-                  by=IB -> CartesianIndex(TupleTools.getindices(IB.I, cindB)))
+                  by=IB -> CartesianIndex(TupleTools.getindices(IB.I, pB[1])))
 
     iA = iB = 1
     @inbounds while iA <= length(keysA) && iB <= length(keysB)
         IA = keysA[iA]
         IB = keysB[iB]
-        IAc = CartesianIndex(TupleTools.getindices(IA.I, cindA))
-        IBc = CartesianIndex(TupleTools.getindices(IB.I, cindB))
+        IAc = CartesianIndex(TupleTools.getindices(IA.I, pA[2]))
+        IBc = CartesianIndex(TupleTools.getindices(IB.I, pB[1]))
         if IAc == IBc
             Ic = IAc
             jA = iA
             while jA < length(keysA)
-                if CartesianIndex(TupleTools.getindices(keysA[jA + 1].I, cindA)) == Ic
+                if CartesianIndex(TupleTools.getindices(keysA[jA + 1].I, pA[2])) == Ic
                     jA += 1
                 else
                     break
@@ -101,7 +87,7 @@ function tensorcontract!(C::SparseArray,
             end
             jB = iB
             while jB < length(keysB)
-                if CartesianIndex(TupleTools.getindices(keysB[jB + 1].I, cindB)) == Ic
+                if CartesianIndex(TupleTools.getindices(keysB[jB + 1].I, pB[1])) == Ic
                     jB += 1
                 else
                     break
@@ -112,13 +98,13 @@ function tensorcontract!(C::SparseArray,
             if length(rA) < length(rB)
                 for kB in rB
                     IB = keysB[kB]
-                    IBo = CartesianIndex(TupleTools.getindices(IB.I, oindB))
+                    IBo = CartesianIndex(TupleTools.getindices(IB.I, pB[2]))
                     vB = B[IB]
                     for kA in rA
                         IA = keysA[kA]
-                        IAo = CartesianIndex(TupleTools.getindices(IA.I, oindA))
+                        IAo = CartesianIndex(TupleTools.getindices(IA.I, pA[1]))
                         IABo = CartesianIndex(IAo, IBo)
-                        IC = CartesianIndex(TupleTools.getindices(IABo.I, indCinoAB))
+                        IC = CartesianIndex(TupleTools.getindices(IABo.I, pAB_lin))
                         vA = A[IA]
                         v = α * (conjA ? conj(vA) : vA) * (conjB ? conj(vB) : vB)
                         increaseindex!(C, v, IC)
@@ -127,14 +113,14 @@ function tensorcontract!(C::SparseArray,
             else
                 for kA in rA
                     IA = keysA[kA]
-                    IAo = CartesianIndex(TupleTools.getindices(IA.I, oindA))
+                    IAo = CartesianIndex(TupleTools.getindices(IA.I, pA[1]))
                     vA = A[IA]
                     for kB in rB
                         IB = keysB[kB]
-                        IBo = CartesianIndex(TupleTools.getindices(IB.I, oindB))
+                        IBo = CartesianIndex(TupleTools.getindices(IB.I, pB[2]))
                         vB = B[IB]
                         IABo = CartesianIndex(IAo, IBo)
-                        IC = CartesianIndex(TupleTools.getindices(IABo.I, indCinoAB))
+                        IC = CartesianIndex(TupleTools.getindices(IABo.I, pAB_lin))
                         v = α * (conjA ? conj(vA) : vA) * (conjB ? conj(vB) : vB)
                         increaseindex!(C, v, IC)
                     end
@@ -148,5 +134,17 @@ function tensorcontract!(C::SparseArray,
                 iB += 1
             end
         end
     end
+    return C
 end
+
+function TO.tensoradd_type(TC, ::SparseArray, pA::Index2Tuple, ::Bool)
+    return SparseArray{TC,TO.numind(pA)}
+end
+
+function TO.tensorcontract_type(TC,
+                                ::SparseArray, pA::Index2Tuple, conjA::Bool,
+                                ::SparseArray, pB::Index2Tuple, conjB::Bool,
+                                pAB::Index2Tuple)
+    return SparseArray{TC,TO.numind(pAB)}
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index ecb4510..32b7a75 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -13,7 +13,6 @@ include("contractions.jl")
 module AquaSparseArrayKit
 using SparseArrayKit, Aqua, Test
 @testset "Aqua" verbose = true begin
-    Aqua.test_all(SparseArrayKit;
-                  project_toml_formatting=(VERSION >= v"1.9"))
+    Aqua.test_all(SparseArrayKit)
 end
 end
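
Reviewer note (not part of the patch): with the package extension removed and TensorOperations promoted to a hard dependency, SparseArray hooks into TensorOperations v5 directly through the SparseArrayBackend defined in src/tensoroperations.jl. Below is a minimal usage sketch of what this enables, assuming the SparseArray{T}(undef, dims) constructor from the package README; the sizes, index labels, and values are illustrative only.

    using SparseArrayKit, TensorOperations

    A = SparseArray{Float64}(undef, (2, 3, 4)); A[1, 2, 3] = 1.0
    B = SparseArray{Float64}(undef, (4, 3, 5)); B[3, 2, 4] = 2.0

    # Contract over the shared dimensions (sizes 3 and 4). Dispatch should land on
    # TO.tensorcontract! with the SparseArrayBackend defined above, and the result
    # should stay sparse because TO.tensorcontract_type returns a SparseArray type.
    @tensor C[a, d] := A[a, b, c] * B[c, b, d]

    nonzero_length(C) # expected: 1, the single entry C[1, 4] == 2.0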