Reabsorb TensorOperations as dependency instead of extension (#15)
* use new lts syntax

* Reabsorb TensorOperations as full dependency

* Fix Aqua compat

* Add missing Compat entries
lkdvos authored Oct 11, 2024
1 parent a17f420 commit e34f234
Showing 8 changed files with 87 additions and 145 deletions.
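The practical effect of the change: the TensorOperations methods for SparseArray now ship with the package itself rather than living in a package extension that only activated once TensorOperations was loaded. A minimal usage sketch (hypothetical session; it assumes the usual SparseArray{T}(undef, dims) constructor and TensorOperations' @tensor macro):

using SparseArrayKit, TensorOperations

A = SparseArray{Float64}(undef, (2, 3, 4)); A[1, 2, 3] = 1.0
B = SparseArray{Float64}(undef, (4, 3, 2)); B[3, 2, 1] = 2.0

# contracts the b and c indices; dispatches to the sparse kernels in src/tensoroperations.jl below
@tensor C[a, d] := A[a, b, c] * B[c, b, d]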
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -22,7 +22,7 @@ jobs:
matrix:
version:
- '1.8' # lowest TensorOperations version
- '1.10' # LTS version
- 'lts' # LTS version
- '1' # automatically expands to the latest stable 1.x release of Julia
os:
- ubuntu-latest
@@ -32,7 +32,7 @@
- x64
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v1
- uses: julia-actions/setup-julia@v2
with:
version: ${{ matrix.version }}
arch: ${{ matrix.arch }}
12 changes: 8 additions & 4 deletions Project.toml
@@ -12,14 +12,19 @@ VectorInterface = "409d34a3-91d5-4945-b6ec-7529ddf182d8"

[weakdeps]
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"

[extensions]
SparseArrayKitSparseArrays = "SparseArrays"
SparseArrayKitTensorOperations = "TensorOperations"

[compat]
Aqua = "0.8"
LinearAlgebra = "1"
PackageExtensionCompat = "1"
Random = "1"
SparseArrays = "1"
Strided = "2"
Test = "1"
TestExtras = "0.3"
TensorOperations = "5"
TupleTools = "1.1"
VectorInterface = "0.4.1"
@@ -31,9 +36,8 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Strided = "5e0ebb24-38b0-5f93-81fe-25c709ecae67"
TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
TestExtras = "5ed8adda-3752-4e41-b88a-e8b09835ee3a"

[targets]
test = ["Test", "TestExtras", "TensorOperations", "Strided", "Random", "LinearAlgebra", "SparseArrays", "Aqua"]
test = ["Test", "TestExtras", "Strided", "Random", "LinearAlgebra", "SparseArrays", "Aqua"]
61 changes: 0 additions & 61 deletions ext/SparseArrayKitTensorOperations.jl

This file was deleted.

3 changes: 1 addition & 2 deletions src/SparseArrayKit.jl
@@ -1,11 +1,10 @@
module SparseArrayKit

using VectorInterface
using TensorOperations
using LinearAlgebra
using TupleTools

const IndexTuple{N} = NTuple{N,Int}

export SparseArray
export nonzero_pairs, nonzero_keys, nonzero_values, nonzero_length

2 changes: 1 addition & 1 deletion src/base.jl
@@ -54,7 +54,7 @@ end

# array manipulation
function Base.permutedims!(dst::SparseArray, src::SparseArray, p)
return tensoradd!(dst, src, false, tuple(p...), true, false)
return tensoradd!(dst, src, (tuple(p...), ()), false)
end

function Base.reshape(parent::SparseArray{T}, dims::Dims) where {T}
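For reference, the new permutedims! body amounts to the following for a rank-3 permutation; a hedged sketch assuming TensorOperations 5's tensoradd!(C, A, pA::Index2Tuple, conjA) method with default scaling factors:

using SparseArrayKit

src = SparseArray{Float64}(undef, (2, 3, 4)); src[1, 2, 3] = 1.0
dst = SparseArray{Float64}(undef, (4, 2, 3))

permutedims!(dst, src, (3, 1, 2))  # equivalent to tensoradd!(dst, src, ((3, 1, 2), ()), false)
dst[3, 1, 2] == 1.0                # the single stored entry ends up at the permuted position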
9 changes: 6 additions & 3 deletions src/linearalgebra.jl
@@ -87,9 +87,12 @@ function LinearAlgebra.mul!(C::SM, A::ASM, B::ASM, α::Number, β::Number)
AA = A isa Union{Adjoint,Transpose} ? parent(A) : A
BB = B isa Union{Adjoint,Transpose} ? parent(B) : B

return tensorcontract!(C, AA, conjA, oindA, cindA, BB, conjB, oindB, cindB, (1, 2), α,
return tensorcontract!(C, AA, (oindA, cindA), conjA, BB, (cindB, oindB), conjB,
((1, 2), ()), α,
β)
end

LinearAlgebra.adjoint!(C::SM, A::SM) = tensoradd!(C, A, true, (2, 1), One(), Zero())
LinearAlgebra.transpose!(C::SM, A::SM) = tensoradd!(C, A, false, (2, 1), One(), Zero())
LinearAlgebra.adjoint!(C::SM, A::SM) = tensoradd!(C, A, ((2, 1), ()), true, One(), Zero())
function LinearAlgebra.transpose!(C::SM, A::SM)
return tensoradd!(C, A, ((2, 1), ()), false, One(), Zero())
end
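In the plain matrix-matrix case the index tuples above specialize to oindA = (1,), cindA = (2,), cindB = (1,), oindB = (2,), so mul! routes through a single sparse contraction. A small hedged example of the user-facing call (hypothetical values):

using SparseArrayKit, LinearAlgebra

A = SparseArray{Float64}(undef, (2, 3)); A[1, 2] = 2.0
B = SparseArray{Float64}(undef, (3, 4)); B[2, 3] = 5.0
C = SparseArray{Float64}(undef, (2, 4))

# lands in tensorcontract!(C, A, ((1,), (2,)), false, B, ((1,), (2,)), false, ((1, 2), ()), 1.0, 0.0)
mul!(C, A, B, 1.0, 0.0)
C[1, 3] == 10.0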
138 changes: 68 additions & 70 deletions src/tensoroperations.jl
@@ -1,107 +1,93 @@
const TO = TensorOperations

struct SparseArrayBackend <: TO.AbstractBackend end

# TensorOperations compatiblity
#-------------------------------
function tensoradd!(C::SparseArray{<:Any,N},
A::SparseArray{<:Any,N}, conjA::Bool, pA,
α::Number=One(), β::Number=One()) where {N}
(N == length(pA) && TupleTools.isperm(pA)) ||
throw(ArgumentError("Invalid permutation of length $N: $pA"))
size(C) == TupleTools.getindices(size(A), pA) ||
throw(DimensionMismatch("non-matching sizes while adding arrays"))
function TO.select_backend(::typeof(TO.tensoradd!), C::SparseArray, A::SparseArray)
return SparseArrayBackend()
end

function TO.tensoradd!(C::SparseArray,
A::SparseArray, pA::Index2Tuple, conjA::Bool,
α::Number, β::Number, ::SparseArrayBackend, allocator)
TO.argcheck_tensoradd(C, A, pA)
TO.dimcheck_tensoradd(C, A, pA)

scale!(C, β)
pA_lin = linearize(pA)
for (IA, vA) in A.data
IC = CartesianIndex(TupleTools.getindices(IA.I, pA))
IC = CartesianIndex(TupleTools.getindices(IA.I, pA_lin))
C[IC] += α * (conjA ? conj(vA) : vA)
end

return C
end

function tensortrace!(C::SparseArray{<:Any,NC},
A::SparseArray{<:Any,NA}, conjA::Bool, p, q1, q2,
α::Number=One(), β::Number=Zero()) where {NA,NC}
NC == length(p) ||
throw(ArgumentError("Invalid selection of $NC out of $NA: $p"))
NA - NC == 2 * length(q1) == 2 * length(q2) ||
throw(ArgumentError("invalid number of trace dimension"))
pA = (p..., q1..., q2...)
TupleTools.isperm(pA) ||
throw(ArgumentError("invalid permutation of length $(ndims(A)): $pA"))

sizeA = size(A)
sizeC = size(C)

TupleTools.getindices(sizeA, q1) == TupleTools.getindices(sizeA, q2) ||
throw(DimensionMismatch("non-matching trace sizes"))
sizeC == TupleTools.getindices(sizeA, p) ||
throw(DimensionMismatch("non-matching sizes"))
function TO.select_backend(::typeof(TO.tensortrace!), C::SparseArray, A::SparseArray)
return SparseArrayBackend()
end

function TO.tensortrace!(C::SparseArray,
A::SparseArray, p::Index2Tuple, q::Index2Tuple, conjA::Bool,
α::Number, β::Number, ::SparseArrayBackend, allocator)
TO.argcheck_tensortrace(C, A, p, q)
TO.dimcheck_tensortrace(C, A, p, q)

scale!(C, β)
p_lin = linearize(p)
for (IA, v) in A.data
IAc1 = CartesianIndex(TupleTools.getindices(IA.I, q1))
IAc2 = CartesianIndex(TupleTools.getindices(IA.I, q2))
IAc1 = CartesianIndex(TupleTools.getindices(IA.I, q[1]))
IAc2 = CartesianIndex(TupleTools.getindices(IA.I, q[2]))
IAc1 == IAc2 || continue

IC = CartesianIndex(TupleTools.getindices(IA.I, p))
IC = CartesianIndex(TupleTools.getindices(IA.I, p_lin))
C[IC] += α * (conjA ? conj(v) : v)
end

return C
end

function tensorcontract!(C::SparseArray,
A::SparseArray, conjA::Bool, oindA, cindA,
B::SparseArray, conjB::Bool, oindB, cindB,
indCinoAB,
α::Number=One(), β::Number=Zero())
pA = (oindA..., cindA...)
(length(pA) == ndims(A) && TupleTools.isperm(pA)) ||
throw(ArgumentError("invalid permutation of length $(ndims(A)): $pA"))
pB = (oindB..., cindB...)
(length(pB) == ndims(B) && TupleTools.isperm(pB)) ||
throw(ArgumentError("invalid permutation of length $(ndims(B)): $pB"))
(length(oindA) + length(oindB) == ndims(C)) ||
throw(ArgumentError("non-matching output indices in contraction"))
(ndims(C) == length(indCinoAB) && isperm(indCinoAB)) ||
throw(ArgumentError("invalid permutation of length $(ndims(C)): $indCinoAB"))

sizeA = size(A)
sizeB = size(B)
sizeC = size(C)

csizeA = TupleTools.getindices(sizeA, cindA)
csizeB = TupleTools.getindices(sizeB, cindB)
osizeA = TupleTools.getindices(sizeA, oindA)
osizeB = TupleTools.getindices(sizeB, oindB)

csizeA == csizeB ||
throw(DimensionMismatch("non-matching sizes in contracted dimensions"))
TupleTools.getindices((osizeA..., osizeB...), indCinoAB) == size(C) ||
throw(DimensionMismatch("non-matching sizes in uncontracted dimensions"))
function TO.select_backend(::typeof(TO.tensorcontract!),
C::SparseArray, A::SparseArray, B::SparseArray)
return SparseArrayBackend()
end

function TO.tensorcontract!(C::SparseArray,
A::SparseArray, pA::Index2Tuple, conjA::Bool,
B::SparseArray, pB::Index2Tuple, conjB::Bool,
pAB::Index2Tuple,
α::Number, β::Number, ::SparseArrayBackend, allocator)
TO.argcheck_tensorcontract(C, A, pA, B, pB, pAB)
TO.dimcheck_tensorcontract(C, A, pA, B, pB, pAB)

scale!(C, β)
pAB_lin = linearize(pAB)

keysA = sort!(collect(nonzero_keys(A));
by=IA -> CartesianIndex(TupleTools.getindices(IA.I, cindA)))
by=IA -> CartesianIndex(TupleTools.getindices(IA.I, pA[2])))
keysB = sort!(collect(nonzero_keys(B));
by=IB -> CartesianIndex(TupleTools.getindices(IB.I, cindB)))
by=IB -> CartesianIndex(TupleTools.getindices(IB.I, pB[1])))

iA = iB = 1
@inbounds while iA <= length(keysA) && iB <= length(keysB)
IA = keysA[iA]
IB = keysB[iB]
IAc = CartesianIndex(TupleTools.getindices(IA.I, cindA))
IBc = CartesianIndex(TupleTools.getindices(IB.I, cindB))
IAc = CartesianIndex(TupleTools.getindices(IA.I, pA[2]))
IBc = CartesianIndex(TupleTools.getindices(IB.I, pB[1]))
if IAc == IBc
Ic = IAc
jA = iA
while jA < length(keysA)
if CartesianIndex(TupleTools.getindices(keysA[jA + 1].I, cindA)) == Ic
if CartesianIndex(TupleTools.getindices(keysA[jA + 1].I, pA[2])) == Ic
jA += 1
else
break
end
end
jB = iB
while jB < length(keysB)
if CartesianIndex(TupleTools.getindices(keysB[jB + 1].I, cindB)) == Ic
if CartesianIndex(TupleTools.getindices(keysB[jB + 1].I, pB[1])) == Ic
jB += 1
else
break
@@ -112,13 +98,13 @@ function tensorcontract!(C::SparseArray,
if length(rA) < length(rB)
for kB in rB
IB = keysB[kB]
IBo = CartesianIndex(TupleTools.getindices(IB.I, oindB))
IBo = CartesianIndex(TupleTools.getindices(IB.I, pB[2]))
vB = B[IB]
for kA in rA
IA = keysA[kA]
IAo = CartesianIndex(TupleTools.getindices(IA.I, oindA))
IAo = CartesianIndex(TupleTools.getindices(IA.I, pA[1]))
IABo = CartesianIndex(IAo, IBo)
IC = CartesianIndex(TupleTools.getindices(IABo.I, indCinoAB))
IC = CartesianIndex(TupleTools.getindices(IABo.I, pAB_lin))
vA = A[IA]
v = α * (conjA ? conj(vA) : vA) * (conjB ? conj(vB) : vB)
increaseindex!(C, v, IC)
@@ -127,14 +113,14 @@
else
for kA in rA
IA = keysA[kA]
IAo = CartesianIndex(TupleTools.getindices(IA.I, oindA))
IAo = CartesianIndex(TupleTools.getindices(IA.I, pA[1]))
vA = A[IA]
for kB in rB
IB = keysB[kB]
IBo = CartesianIndex(TupleTools.getindices(IB.I, oindB))
IBo = CartesianIndex(TupleTools.getindices(IB.I, pB[2]))
vB = B[IB]
IABo = CartesianIndex(IAo, IBo)
IC = CartesianIndex(TupleTools.getindices(IABo.I, indCinoAB))
IC = CartesianIndex(TupleTools.getindices(IABo.I, pAB_lin))
v = α * (conjA ? conj(vA) : vA) * (conjB ? conj(vB) : vB)
increaseindex!(C, v, IC)
end
@@ -148,5 +134,17 @@
iB += 1
end
end

return C
end

function TO.tensoradd_type(TC, ::SparseArray, pA::Index2Tuple, ::Bool)
return SparseArray{TC,TO.numind(pA)}
end

function TO.tensorcontract_type(TC,
::SparseArray, pA::Index2Tuple, conjA::Bool,
::SparseArray, pB::Index2Tuple, conjB::Bool,
pAB::Index2Tuple)
return SparseArray{TC,TO.numind(pAB)}
end
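Two conventions recur throughout the rewritten file: every Index2Tuple argument is a tuple of two index groups, and the kernels flatten it before indexing into CartesianIndex.I; meanwhile the new TO.select_backend methods make TensorOperations pick SparseArrayBackend automatically whenever all operands are SparseArrays, so callers never pass the backend explicitly. A minimal sketch of the index convention (the concatenation below is what linearize is assumed to do):

# an Index2Tuple: two groups of indices, e.g. (open, contracted) for the A argument of a contraction
pA = ((1, 3), (2,))
pA_lin = (pA[1]..., pA[2]...)  # (1, 3, 2): the flat permutation the loops above index with
@assert pA_lin == (1, 3, 2)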
3 changes: 1 addition & 2 deletions test/runtests.jl
@@ -13,7 +13,6 @@ include("contractions.jl")
module AquaSparseArrayKit
using SparseArrayKit, Aqua, Test
@testset "Aqua" verbose = true begin
Aqua.test_all(SparseArrayKit;
project_toml_formatting=(VERSION >= v"1.9"))
Aqua.test_all(SparseArrayKit)
end
end
