diff --git a/.gitignore b/.gitignore index 0ee3d17..5caf9ac 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,6 @@ *.jl.cov *.jl.*.cov *.jl.mem -Manifest.toml \ No newline at end of file +Manifest.toml +.vscode +.DS_Store \ No newline at end of file diff --git a/Project.toml b/Project.toml index ba0bf14..ca3254f 100644 --- a/Project.toml +++ b/Project.toml @@ -20,11 +20,13 @@ cuTENSOR = "011b41b2-24ef-40a8-b3eb-fa098493e9e1" Bumper = "8ce10254-0962-460f-a3d8-1f77fea1446e" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715" cuTENSOR = "011b41b2-24ef-40a8-b3eb-fa098493e9e1" [extensions] TensorOperationsBumperExt = "Bumper" TensorOperationsChainRulesCoreExt = "ChainRulesCore" +TensorOperationsOMEinsumContractionOrdersExt = "OMEinsumContractionOrders" TensorOperationscuTENSORExt = ["cuTENSOR", "CUDA"] [compat] @@ -37,6 +39,7 @@ DynamicPolynomials = "0.5" LRUCache = "1" LinearAlgebra = "1.6" Logging = "1.6" +OMEinsumContractionOrders = "0.9.2" PackageExtensionCompat = "1" PtrArrays = "1.2" Random = "1" @@ -55,9 +58,10 @@ CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" DynamicPolynomials = "7c1d4256-1411-5781-91ec-d7bc3513ac07" Logging = "56ddb016-857b-54e1-b83d-db4d58db5568" +OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" cuTENSOR = "011b41b2-24ef-40a8-b3eb-fa098493e9e1" [targets] -test = ["Test", "Random", "DynamicPolynomials", "ChainRulesTestUtils", "CUDA", "cuTENSOR", "Aqua", "Logging", "Bumper"] +test = ["Test", "Random", "DynamicPolynomials", "ChainRulesTestUtils", "CUDA", "cuTENSOR", "Aqua", "Logging", "Bumper", "OMEinsumContractionOrders"] diff --git a/ext/TensorOperationsOMEinsumContractionOrdersExt.jl b/ext/TensorOperationsOMEinsumContractionOrdersExt.jl new file mode 100644 index 0000000..1e846d9 --- /dev/null +++ b/ext/TensorOperationsOMEinsumContractionOrdersExt.jl @@ -0,0 +1,113 @@ +module TensorOperationsOMEinsumContractionOrdersExt + +using TensorOperations +using TensorOperations: TensorOperations as TO +using TensorOperations: TreeOptimizer +using OMEinsumContractionOrders +using OMEinsumContractionOrders: EinCode, NestedEinsum, SlicedEinsum, isleaf, + optimize_kahypar_auto + +function TO.optimaltree(network, optdata::Dict{TDK,TDV}, ::TreeOptimizer{:GreedyMethod}, + verbose::Bool) where {TDK,TDV} + @debug "Using optimizer GreedyMethod from OMEinsumContractionOrders" + ome_optimizer = GreedyMethod() + return optimize(network, optdata, ome_optimizer, verbose) +end + +function TO.optimaltree(network, optdata::Dict{TDK,TDV}, ::TreeOptimizer{:KaHyParBipartite}, + verbose::Bool) where {TDK,TDV} + @debug "Using optimizer KaHyParBipartite from OMEinsumContractionOrders" + return optimize_kahypar(network, optdata, verbose) +end + +function TO.optimaltree(network, optdata::Dict{TDK,TDV}, ::TreeOptimizer{:TreeSA}, + verbose::Bool) where {TDK,TDV} + @debug "Using optimizer TreeSA from OMEinsumContractionOrders" + ome_optimizer = TreeSA() + return optimize(network, optdata, ome_optimizer, verbose) +end + +function TO.optimaltree(network, optdata::Dict{TDK,TDV}, ::TreeOptimizer{:SABipartite}, + verbose::Bool) where {TDK,TDV} + @debug "Using optimizer SABipartite from OMEinsumContractionOrders" + ome_optimizer = SABipartite() + return optimize(network, optdata, ome_optimizer, verbose) +end + +function 
TO.optimaltree(network, optdata::Dict{TDK,TDV}, ::TreeOptimizer{:ExactTreewidth}, + verbose::Bool) where {TDK,TDV} + @debug "Using optimizer ExactTreewidth from OMEinsumContractionOrders" + ome_optimizer = ExactTreewidth() + return optimize(network, optdata, ome_optimizer, verbose) +end + +function optimize(network, optdata::Dict{TDK,TDV}, ome_optimizer::CodeOptimizer, + verbose::Bool) where {TDK,TDV} + @assert TDV <: Number "The values of the `optdata` dictionary must be of type `<:Number`" + + # transform the network into an EinCode + code, size_dict = network2eincode(network, optdata) + # optimize the contraction order using OMEinsumContractionOrders, which gives a NestedEinsum + optcode = optimize_code(code, size_dict, ome_optimizer) + + # transform the optimized contraction order back to the network + optimaltree = eincode2contractiontree(optcode) + + # calculate the complexity of the contraction + cc = OMEinsumContractionOrders.contraction_complexity(optcode, size_dict) + if verbose + println("Optimal contraction tree: ", optimaltree) + println(cc) + end + return optimaltree, 2.0^(cc.tc) +end + +function optimize_kahypar(network, optdata::Dict{TDK,TDV}, verbose::Bool) where {TDK,TDV} + @assert TDV <: Number "The values of the `optdata` dictionary must be of type `<:Number`" + + # transform the network into an EinCode + code, size_dict = network2eincode(network, optdata) + # optimize the contraction order using OMEinsumContractionOrders, which gives a NestedEinsum + optcode = optimize_kahypar_auto(code, size_dict) + + # transform the optimized contraction order back to the network + optimaltree = eincode2contractiontree(optcode) + + # calculate the complexity of the contraction + cc = OMEinsumContractionOrders.contraction_complexity(optcode, size_dict) + if verbose + println("Optimal contraction tree: ", optimaltree) + println(cc) + end + return optimaltree, 2.0^(cc.tc) +end + +function network2eincode(network, optdata) + indices = unique(vcat(network...)) + new_indices = Dict([i => j for (j, i) in enumerate(indices)]) + new_network = [Int[new_indices[i] for i in t] for t in network] + open_edges = Int[] + # if an index appears only once, it is an open index + for i in indices + if sum([i in t for t in network]) == 1 + push!(open_edges, new_indices[i]) + end + end + size_dict = Dict([new_indices[i] => optdata[i] for i in keys(optdata)]) + return EinCode(new_network, open_edges), size_dict +end + +function eincode2contractiontree(eincode::NestedEinsum) + if isleaf(eincode) + return eincode.tensorindex + else + return [eincode2contractiontree(arg) for arg in eincode.args] + end +end + +# TreeSA returns a SlicedEinsum with nslice = 0, so we directly use its inner eins +function eincode2contractiontree(eincode::SlicedEinsum) + return eincode2contractiontree(eincode.eins) +end + +end diff --git a/src/TensorOperations.jl b/src/TensorOperations.jl index d3e6ff7..cf77398 100644 --- a/src/TensorOperations.jl +++ b/src/TensorOperations.jl @@ -29,6 +29,11 @@ export IndexTuple, Index2Tuple, linearize # export debug functionality export checkcontractible, tensorcost +# export optimizers +export TreeOptimizer, ExhaustiveSearchOptimizer, GreedyMethodOptimizer, + KaHyParBipartiteOptimizer, TreeSAOptimizer, SABipartiteOptimizer, + ExactTreewidthOptimizer + # Interface and index types #--------------------------- include("indices.jl") diff --git a/src/implementation/ncon.jl b/src/implementation/ncon.jl index 7dec553..8942533 100644 --- a/src/implementation/ncon.jl +++ b/src/implementation/ncon.jl @@ -1,5 +1,5 @@ """ - ncon(tensorlist, 
indexlist, [conjlist, sym]; order = ..., output = ..., backend = ..., allocator = ...) + ncon(tensorlist, indexlist, [conjlist, sym]; order = ..., output = ..., optimizer = ..., backend = ..., allocator = ...) Contract the tensors in `tensorlist` (of type `Vector` or `Tuple`) according to the network as specified by `indexlist`. Here, `indexlist` is a list (i.e. a `Vector` or `Tuple`) with @@ -20,11 +20,16 @@ over are labelled by increasing integers, i.e. first the contraction correspondi (negative, so increasing in absolute value) index labels. The keyword arguments `order` and `output` allow to change these defaults. +Alternatively, the contraction order can be determined automatically by passing the `optimizer` +keyword argument instead of `order`. The `optimizer` should be a `TreeOptimizer` instance, e.g. `ExhaustiveSearchOptimizer()`. +With the `OMEinsumContractionOrders` extension loaded, it can also be one of +`GreedyMethodOptimizer()`, `TreeSAOptimizer()`, `KaHyParBipartiteOptimizer()`, `SABipartiteOptimizer()` or `ExactTreewidthOptimizer()`. + See also the macro version [`@ncon`](@ref). """ function ncon(tensors, network, conjlist=fill(false, length(tensors)); - order=nothing, output=nothing, kwargs...) + order=nothing, output=nothing, optimizer=nothing, kwargs...) length(tensors) == length(network) == length(conjlist) || throw(ArgumentError("number of tensors and of index lists should be the same")) isnconstyle(network) || throw(ArgumentError("invalid NCON network: $network")) @@ -39,11 +44,37 @@ function ncon(tensors, network, end (tensors, network) = resolve_traces(tensors, network) - tree = order === nothing ? ncontree(network) : indexordertree(network, order) + if isnothing(order) + if isnothing(optimizer) + # neither order nor optimizer specified: build the tree via ncontree + tree = ncontree(network) + else + # determine the contraction tree via the optimizer + optdata = Dict{Any,Number}() + for (i, ids) in enumerate(network) + for (j, id) in enumerate(ids) + optdata[id] = tensorstructure(tensors[i], j, conjlist[i]) + end + end + tree = optimaltree(network, optdata, optimizer, false)[1] + end + else + if !isnothing(optimizer) + throw(ArgumentError("cannot specify both `order` and `optimizer`")) + else + # order specified: build the tree via indexordertree + tree = indexordertree(network, order) + end + end + + return ncon(tensors, network, conjlist, tree, output′; kwargs...) +end + +function ncon(tensors, network, conjlist, tree, output; kwargs...) A, IA, conjA = contracttree(tensors, network, conjlist, tree[1]; kwargs...) B, IB, conjB = contracttree(tensors, network, conjlist, tree[2]; kwargs...) - IC = tuple(output′...) + IC = tuple(output...) C = tensorcontract(IC, A, IA, conjA, B, IB, conjB; kwargs...) allocator = haskey(kwargs, :allocator) ? 
kwargs[:allocator] : DefaultAllocator() tree[1] isa Int || tensorfree!(A, allocator) diff --git a/src/indexnotation/optimaltree.jl b/src/indexnotation/optimaltree.jl index 5e1fd10..a3e8cee 100644 --- a/src/indexnotation/optimaltree.jl +++ b/src/indexnotation/optimaltree.jl @@ -1,4 +1,24 @@ -function optimaltree(network, optdata::Dict; verbose::Bool=false) +struct TreeOptimizer{T} end # T is a Symbol identifying the algorithm +ExhaustiveSearchOptimizer() = TreeOptimizer{:ExhaustiveSearch}() +GreedyMethodOptimizer() = TreeOptimizer{:GreedyMethod}() +KaHyParBipartiteOptimizer() = TreeOptimizer{:KaHyParBipartite}() +TreeSAOptimizer() = TreeOptimizer{:TreeSA}() +SABipartiteOptimizer() = TreeOptimizer{:SABipartite}() +ExactTreewidthOptimizer() = TreeOptimizer{:ExactTreewidth}() + +function optimaltree(network, optdata::Dict; + optimizer::TreeOptimizer{T}=TreeOptimizer{:ExhaustiveSearch}(), + verbose::Bool=false) where {T} + return optimaltree(network, optdata, optimizer, verbose) +end + +function optimaltree(network, optdata::Dict, ::TreeOptimizer{T}, verbose::Bool) where {T} + throw(ArgumentError("Unknown optimizer: $T. Hint: you may need to load the corresponding extension, e.g. `using OMEinsumContractionOrders`")) +end + +function optimaltree(network, optdata::Dict, ::TreeOptimizer{:ExhaustiveSearch}, + verbose::Bool) + @debug "Using optimizer ExhaustiveSearch" numtensors = length(network) allindices = unique(vcat(network...)) numindices = length(allindices) diff --git a/src/indexnotation/tensormacros.jl b/src/indexnotation/tensormacros.jl index 678aa98..7b11913 100644 --- a/src/indexnotation/tensormacros.jl +++ b/src/indexnotation/tensormacros.jl @@ -71,6 +71,8 @@ function tensorparser(tensorexpr, kwargs...) end end # now handle the remaining keyword arguments + optimizer = TreeOptimizer{:ExhaustiveSearch}() # the default optimizer implemented in TensorOperations.jl + optval = nothing for (name, val) in kwargs if name == :order isexpr(val, :tuple) || @@ -86,18 +88,29 @@ 
throw(ArgumentError("Invalid use of `costcheck`, should be `costcheck=warn` or `costcheck=cache`")) parser.contractioncostcheck = val elseif name == :opt - if val isa Bool && val - optdict = optdata(tensorexpr) - elseif val isa Expr - optdict = optdata(val, tensorexpr) + optval = val + elseif name == :opt_algorithm + if val isa Symbol + optimizer = TreeOptimizer{val}() else - throw(ArgumentError("Invalid use of `opt`, should be `opt=true` or `opt=OptExpr`")) + throw(ArgumentError("Invalid use of `opt_algorithm`, should be `opt_algorithm=ExhaustiveSearch` or `opt_algorithm=NameOfAlgorithm`")) end - parser.contractiontreebuilder = network -> optimaltree(network, optdict)[1] elseif !(name == :backend || name == :allocator) # these two have been handled throw(ArgumentError("Unknown keyword argument `name`.")) end end + # construct the contraction tree builder after all keyword arguments have been processed + if !isnothing(optval) + if optval isa Bool && optval + optdict = optdata(tensorexpr) + elseif optval isa Expr + optdict = optdata(optval, tensorexpr) + else + throw(ArgumentError("Invalid use of `opt`, should be `opt=true` or `opt=OptExpr`")) + end + parser.contractiontreebuilder = network -> optimaltree(network, optdict; + optimizer=optimizer)[1] + end return parser end diff --git a/test/macro_kwargs.jl b/test/macro_kwargs.jl index 254b6a8..4ecddd0 100644 --- a/test/macro_kwargs.jl +++ b/test/macro_kwargs.jl @@ -106,3 +106,19 @@ end end @test D1 ≈ D2 ≈ D3 ≈ D4 ≈ D5 end + +@testset "opt_algorithm" begin + A = randn(5, 5, 5, 5) + B = randn(5, 5, 5) + C = randn(5, 5, 5) + + @tensor opt = true begin + D1[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @tensor opt = true opt_algorithm = ExhaustiveSearch begin + D2[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @test D1 ≈ D2 +end diff --git a/test/omeinsum.jl b/test/omeinsum.jl new file mode 100644 index 0000000..fc8491c --- /dev/null +++ b/test/omeinsum.jl @@ -0,0 +1,189 @@ +@testset "@tensor dependency check" begin + A = rand(2, 2) + B = rand(2, 2) + C = rand(2, 2) + @test_throws ArgumentError begin + ex = :(@tensor opt = (i => 2, j => 2, k => 2) opt_algorithm = GreedyMethod S[] := A[i, + j] * + B[j, + k] * + C[i, + k]) + macroexpand(Main, ex) + end +end + +using OMEinsumContractionOrders + +@testset "OMEinsumContractionOrders optimization algorithms" begin + A = randn(5, 5, 5, 5) + B = randn(5, 5, 5) + C = randn(5, 5, 5) + + @tensor begin + D1[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @tensor opt = (a => 5, b => 5, c => 5, d => 5, e => 5, f => 5, g => 5) opt_algorithm = GreedyMethod begin + D2[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @tensor opt = (a => 5, b => 5, c => 5, d => 5, e => 5, f => 5, g => 5) opt_algorithm = TreeSA begin + D3[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @tensor opt = (a => 5, b => 5, c => 5, d => 5, e => 5, f => 5, g => 5) opt_algorithm = KaHyParBipartite begin + D4[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @tensor opt = (a => 5, b => 5, c => 5, d => 5, e => 5, f => 5, g => 5) opt_algorithm = SABipartite begin + D5[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @tensor opt = (a => 5, b => 5, c => 5, d => 5, e => 5, f => 5, g => 5) opt_algorithm = ExactTreewidth begin + D6[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @tensor opt = (1 => 5, 2 => 5, 3 => 5, 4 => 5, 5 => 5, 6 => 5, 7 => 5) opt_algorithm = GreedyMethod begin + D7[1, 2, 3, 4] := A[1, 5, 3, 6] 
* B[7, 4, 5] * C[7, 6, 2] + end + + # check the case that opt_algorithm is before the opt + @tensor opt_algorithm = GreedyMethod opt = (a => 5, b => 5, c => 5, d => 5, e => 5, + f => 5, g => 5) begin + D8[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + @test D1 ≈ D2 ≈ D3 ≈ D4 ≈ D5 ≈ D6 ≈ D7 ≈ D8 + + A = rand(2, 2) + B = rand(2, 2, 2) + C = rand(2, 2) + D = rand(2, 2) + E = rand(2, 2, 2) + F = rand(2, 2) + + @tensor opt = true begin + s1[] := A[i, k] * B[i, j, l] * C[j, m] * D[k, n] * E[n, l, o] * F[o, m] + end + + @tensor opt = (i => 2, j => 2, k => 2, l => 2, m => 2, n => 2, o => 2) opt_algorithm = GreedyMethod begin + s2[] := A[i, k] * B[i, j, l] * C[j, m] * D[k, n] * E[n, l, o] * F[o, m] + end + + @tensor opt = (i => 2, j => 2, k => 2, l => 2, m => 2, n => 2, o => 2) opt_algorithm = TreeSA begin + s3[] := A[i, k] * B[i, j, l] * C[j, m] * D[k, n] * E[n, l, o] * F[o, m] + end + + @tensor opt = (i => 2, j => 2, k => 2, l => 2, m => 2, n => 2, o => 2) opt_algorithm = KaHyParBipartite begin + s4[] := A[i, k] * B[i, j, l] * C[j, m] * D[k, n] * E[n, l, o] * F[o, m] + end + + @tensor opt = (i => 2, j => 2, k => 2, l => 2, m => 2, n => 2, o => 2) opt_algorithm = SABipartite begin + s5[] := A[i, k] * B[i, j, l] * C[j, m] * D[k, n] * E[n, l, o] * F[o, m] + end + + @tensor opt = (i => 2, j => 2, k => 2, l => 2, m => 2, n => 2, o => 2) opt_algorithm = ExactTreewidth begin + s6[] := A[i, k] * B[i, j, l] * C[j, m] * D[k, n] * E[n, l, o] * F[o, m] + end + + @test s1 ≈ s2 ≈ s3 ≈ s4 ≈ s5 ≈ s6 + + A = randn(5, 5, 5) + B = randn(5, 5, 5) + C = randn(5, 5, 5) + α = randn() + + @tensor opt = true begin + D1[m] := A[i, j, k] * B[j, k, l] * C[i, l, m] + + α * A[i, j, k] * B[j, k, l] * C[i, l, m] + end + + @tensor opt = (i => 5, j => 5, k => 5, l => 5, m => 5) opt_algorithm = GreedyMethod begin + D2[m] := A[i, j, k] * B[j, k, l] * C[i, l, m] + + α * A[i, j, k] * B[j, k, l] * C[i, l, m] + end + + @tensor opt = (i => 5, j => 5, k => 5, l => 5, m => 5) opt_algorithm = TreeSA begin + D3[m] := A[i, j, k] * B[j, k, l] * C[i, l, m] + + α * A[i, j, k] * B[j, k, l] * C[i, l, m] + end + + @tensor opt = (i => 5, j => 5, k => 5, l => 5, m => 5) opt_algorithm = KaHyParBipartite begin + D4[m] := A[i, j, k] * B[j, k, l] * C[i, l, m] + + α * A[i, j, k] * B[j, k, l] * C[i, l, m] + end + + @tensor opt = (i => 5, j => 5, k => 5, l => 5, m => 5) opt_algorithm = SABipartite begin + D5[m] := A[i, j, k] * B[j, k, l] * C[i, l, m] + + α * A[i, j, k] * B[j, k, l] * C[i, l, m] + end + + @tensor opt = (i => 5, j => 5, k => 5, l => 5, m => 5) opt_algorithm = ExactTreewidth begin + D6[m] := A[i, j, k] * B[j, k, l] * C[i, l, m] + + α * A[i, j, k] * B[j, k, l] * C[i, l, m] + end + + @test D1 ≈ D2 ≈ D3 ≈ D4 ≈ D5 ≈ D6 +end + +@testset "ncon with OMEinsumContractionOrders" begin + A = randn(5, 5, 5, 5) + B = randn(5, 5, 5) + C = randn(5, 5, 5) + + @tensor begin + D1[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b] + end + + D2 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]) + D3 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=ExhaustiveSearchOptimizer()) + D4 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=GreedyMethodOptimizer()) + D5 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=KaHyParBipartiteOptimizer()) + D6 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=TreeSAOptimizer()) + D7 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=SABipartiteOptimizer()) + D8 = ncon([A, B, C], [[-1, 5, -3, 
6], [7, -4, 5], [7, 6, -2]]; + optimizer=ExactTreewidthOptimizer()) + + @test D1 ≈ D2 ≈ D3 ≈ D4 ≈ D5 ≈ D6 ≈ D7 ≈ D8 + + @test_throws ArgumentError begin + D9 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; order=[5, 6, 7], + optimizer=GreedyMethod()) + end + + @test_logs (:debug, "Using optimizer ExhaustiveSearch") min_level = Logging.Debug match_mode = :any begin + ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=ExhaustiveSearchOptimizer()) + end + + @test_logs (:debug, "Using optimizer GreedyMethod from OMEinsumContractionOrders") min_level = Logging.Debug match_mode = :any begin + ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=GreedyMethodOptimizer()) + end + + @test_logs (:debug, "Using optimizer KaHyParBipartite from OMEinsumContractionOrders") min_level = Logging.Debug match_mode = :any begin + ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=KaHyParBipartiteOptimizer()) + end + + @test_logs (:debug, "Using optimizer TreeSA from OMEinsumContractionOrders") min_level = Logging.Debug match_mode = :any begin + ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=TreeSAOptimizer()) + end + + @test_logs (:debug, "Using optimizer SABipartite from OMEinsumContractionOrders") min_level = Logging.Debug match_mode = :any begin + ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=SABipartiteOptimizer()) + end + + @test_logs (:debug, "Using optimizer ExactTreewidth from OMEinsumContractionOrders") min_level = Logging.Debug match_mode = :any begin + ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]]; + optimizer=ExactTreewidthOptimizer()) + end +end diff --git a/test/runtests.jl b/test/runtests.jl index 5b06814..dc48e2e 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,6 +1,7 @@ using TensorOperations using LinearAlgebra using Test +using Logging using Random Random.seed!(1234567) @@ -39,6 +40,15 @@ end include("butensor.jl") end +# note: OMEinsumContractionOrders should not be loaded before this point, +# as the dependency check test requires the extension to not yet be loaded. +# These tests run only when package extensions are supported (Julia >= 1.9). +if isdefined(Base, :get_extension) + @testset "OMEinsumOptimizer extension" begin + include("omeinsum.jl") + end +end + @testset "Polynomials" begin include("polynomials.jl") end
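For reference, a minimal usage sketch of the two entry points exercised by the tests above: the new `opt_algorithm` keyword of `@tensor` and the new `optimizer` keyword of `ncon`. This assumes the patch is applied and OMEinsumContractionOrders is installed; the tensor names and dimensions are illustrative, not part of the patch.

using TensorOperations, OMEinsumContractionOrders

A = randn(5, 5, 5, 5); B = randn(5, 5, 5); C = randn(5, 5, 5)

# macro interface: select the contraction-order algorithm with `opt_algorithm`
@tensor opt = (a => 5, b => 5, c => 5, d => 5, e => 5, f => 5, g => 5) opt_algorithm = GreedyMethod begin
    D[a, b, c, d] := A[a, e, c, f] * B[g, d, e] * C[g, f, b]
end

# ncon interface: pass a TreeOptimizer instance via the `optimizer` keyword
D2 = ncon([A, B, C], [[-1, 5, -3, 6], [7, -4, 5], [7, 6, -2]];
          optimizer=GreedyMethodOptimizer())

@assert D ≈ D2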