diff --git a/Project.toml b/Project.toml
index 9306ec49..2a76aaa4 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "ITensorNetworks"
 uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
 authors = ["Matthew Fishman and contributors"]
-version = "0.5"
+version = "0.6"
 
 [deps]
 AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
diff --git a/src/approx_itensornetwork/ttn_svd.jl b/src/approx_itensornetwork/ttn_svd.jl
index 958a80f9..59797c3e 100644
--- a/src/approx_itensornetwork/ttn_svd.jl
+++ b/src/approx_itensornetwork/ttn_svd.jl
@@ -2,7 +2,7 @@ using IterTools: partition
 """
 Approximate a `partition` into an output ITensorNetwork
 with the binary tree structure defined by `out_tree` by
-first transforming the partition into a TTN, then truncating
+first transforming the partition into a ttn, then truncating
 the ttn using a sequence of SVDs.
 """
 function _approx_itensornetwork_ttn_svd!(
@@ -22,7 +22,7 @@ function _approx_itensornetwork_ttn_svd!(
       contraction_sequence_kwargs=contraction_sequence_kwargs,
     )
   end
-  truncate_ttn = truncate(TTN(tn); cutoff=cutoff, maxdim=maxdim, root_vertex=root)
+  truncate_ttn = truncate(ttn(tn); cutoff=cutoff, maxdim=maxdim, root_vertex=root)
   out_tn = ITensorNetwork(truncate_ttn)
   root_tensor = out_tn[root]
   root_norm = norm(root_tensor)
diff --git a/src/solvers/contract.jl b/src/solvers/contract.jl
index 7a9fb2d9..cfc90fd6 100644
--- a/src/solvers/contract.jl
+++ b/src/solvers/contract.jl
@@ -22,7 +22,7 @@ function sum_contract(
   )
   any(ns .!= n) &&
     throw(DimensionMismatch("Number of sites in different operators ($n) do not match"))
-  # ToDo: Write test for single-vertex TTN, this implementation has not been tested.
+  # ToDo: Write test for single-vertex ttn, this implementation has not been tested.
   if n == 1
     res = 0
     for (tn1, tn2) in zip(tn1s, tn2s)
diff --git a/src/specialitensornetworks.jl b/src/specialitensornetworks.jl
index 967022b9..adb5f1f6 100644
--- a/src/specialitensornetworks.jl
+++ b/src/specialitensornetworks.jl
@@ -101,24 +101,24 @@ end
 """
 Build an ITensor network on a graph specified by the inds network s.
 Bond_dim is given by link_space and entries are randomised (normal distribution, mean 0 std 1)
 """
-function randomITensorNetwork(eltype::Type, s::IndsNetwork; link_space=nothing)
+function random_itensornetwork(eltype::Type, s::IndsNetwork; link_space=nothing)
   return ITensorNetwork(s; link_space) do v, inds...
     itensor(randn(eltype, dim(inds)...), inds...)
   end
 end
 
-function randomITensorNetwork(s::IndsNetwork; link_space=nothing)
-  return randomITensorNetwork(Float64, s; link_space)
+function random_itensornetwork(s::IndsNetwork; link_space=nothing)
+  return random_itensornetwork(Float64, s; link_space)
 end
 
-@traitfn function randomITensorNetwork(
+@traitfn function random_itensornetwork(
   eltype::Type, g::::IsUnderlyingGraph; link_space=nothing
 )
-  return randomITensorNetwork(eltype, IndsNetwork(g); link_space)
+  return random_itensornetwork(eltype, IndsNetwork(g); link_space)
 end
 
-@traitfn function randomITensorNetwork(g::::IsUnderlyingGraph; link_space=nothing)
-  return randomITensorNetwork(Float64, IndsNetwork(g); link_space)
+@traitfn function random_itensornetwork(g::::IsUnderlyingGraph; link_space=nothing)
+  return random_itensornetwork(Float64, IndsNetwork(g); link_space)
 end
 """
@@ -126,7 +126,7 @@ Build an ITensor network on a graph specified by the inds network s.
 Bond_dim is given by link_space and entries are randomized.
 The random distribution is based on the input argument `distribution`.
 """
-function randomITensorNetwork(
+function random_itensornetwork(
   distribution::Distribution, s::IndsNetwork; link_space=nothing
 )
   return ITensorNetwork(s; link_space) do v, inds...
@@ -134,8 +134,8 @@ function randomITensorNetwork(
   end
 end
 
-@traitfn function randomITensorNetwork(
+@traitfn function random_itensornetwork(
   distribution::Distribution, g::::IsUnderlyingGraph; link_space=nothing
 )
-  return randomITensorNetwork(distribution, IndsNetwork(g); link_space)
+  return random_itensornetwork(distribution, IndsNetwork(g); link_space)
 end
diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl
index 2b71f5db..f835297b 100644
--- a/src/treetensornetworks/abstracttreetensornetwork.jl
+++ b/src/treetensornetworks/abstracttreetensornetwork.jl
@@ -295,7 +295,7 @@ function Base.:+(
   @assert all(ψ -> nv(first(ψs)) == nv(ψ), ψs)
 
   # Output state
-  ϕ = TTN(siteinds(ψs[1]))
+  ϕ = ttn(siteinds(ψs[1]))
 
   vs = post_order_dfs_vertices(ϕ, root_vertex)
   es = post_order_dfs_edges(ϕ, root_vertex)
diff --git a/src/treetensornetworks/opsum_to_ttn.jl b/src/treetensornetworks/opsum_to_ttn.jl
index d782e818..4ffd1743 100644
--- a/src/treetensornetworks/opsum_to_ttn.jl
+++ b/src/treetensornetworks/opsum_to_ttn.jl
@@ -252,7 +252,7 @@ function ttn_svd(
     link_space[e] = Index(qi...; tags=edge_tag(e), dir=linkdir_ref)
   end
-  H = TTN(sites0) # initialize TTN without the dummy indices added
+  H = ttn(sites0) # initialize TTN without the dummy indices added
   function qnblock(i::Index, q::QN)
     for b in 2:(nblocks(i) - 1)
       flux(i, Block(b)) == q && return b
     end
@@ -496,12 +496,12 @@ function sorteachterm(os::OpSum, sites::IndsNetwork{V,<:Index}, root_vertex::V)
 end
 
 """
-    TTN(os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
-    TTN(eltype::Type{<:Number}, os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
+    ttn(os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
+    ttn(eltype::Type{<:Number}, os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
 
 Convert an OpSum object `os` to a TreeTensorNetwork, with indices given by `sites`.
 """
-function TTN(
+function ttn(
   os::OpSum,
   sites::IndsNetwork;
   root_vertex=default_root_vertex(sites),
@@ -530,37 +530,37 @@ end
 
 function mpo(os::OpSum, external_inds::Vector; kwargs...)
-  return TTN(os, path_indsnetwork(external_inds); kwargs...)
+  return ttn(os, path_indsnetwork(external_inds); kwargs...)
 end
 
 # Conversion from other formats
-function TTN(o::Op, s::IndsNetwork; kwargs...)
-  return TTN(OpSum{Float64}() + o, s; kwargs...)
+function ttn(o::Op, s::IndsNetwork; kwargs...)
+  return ttn(OpSum{Float64}() + o, s; kwargs...)
 end
 
-function TTN(o::Scaled{C,Op}, s::IndsNetwork; kwargs...) where {C}
-  return TTN(OpSum{C}() + o, s; kwargs...)
+function ttn(o::Scaled{C,Op}, s::IndsNetwork; kwargs...) where {C}
+  return ttn(OpSum{C}() + o, s; kwargs...)
 end
 
-function TTN(o::Sum{Op}, s::IndsNetwork; kwargs...)
-  return TTN(OpSum{Float64}() + o, s; kwargs...)
+function ttn(o::Sum{Op}, s::IndsNetwork; kwargs...)
+  return ttn(OpSum{Float64}() + o, s; kwargs...)
 end
 
-function TTN(o::Prod{Op}, s::IndsNetwork; kwargs...)
-  return TTN(OpSum{Float64}() + o, s; kwargs...)
+function ttn(o::Prod{Op}, s::IndsNetwork; kwargs...)
+  return ttn(OpSum{Float64}() + o, s; kwargs...)
 end
 
-function TTN(o::Scaled{C,Prod{Op}}, s::IndsNetwork; kwargs...) where {C}
-  return TTN(OpSum{C}() + o, s; kwargs...)
+function ttn(o::Scaled{C,Prod{Op}}, s::IndsNetwork; kwargs...) where {C}
+  return ttn(OpSum{C}() + o, s; kwargs...)
 end
 
-function TTN(o::Sum{Scaled{C,Op}}, s::IndsNetwork; kwargs...) where {C}
-  return TTN(OpSum{C}() + o, s; kwargs...)
+function ttn(o::Sum{Scaled{C,Op}}, s::IndsNetwork; kwargs...) where {C}
+  return ttn(OpSum{C}() + o, s; kwargs...)
 end
 
 # Catch-all for leaf eltype specification
-function TTN(eltype::Type{<:Number}, os, sites::IndsNetwork; kwargs...)
-  return NDTensors.convert_scalartype(eltype, TTN(os, sites; kwargs...))
+function ttn(eltype::Type{<:Number}, os, sites::IndsNetwork; kwargs...)
+  return NDTensors.convert_scalartype(eltype, ttn(os, sites; kwargs...))
 end
 
 #
diff --git a/src/treetensornetworks/ttn.jl b/src/treetensornetworks/ttn.jl
index 63148863..09364103 100644
--- a/src/treetensornetworks/ttn.jl
+++ b/src/treetensornetworks/ttn.jl
@@ -26,12 +26,17 @@ end
 
 const TTN = TreeTensorNetwork
 
+# Dispatch to the type constructor directly; a recursive `ttn` call here would
+# dispatch back to this same method and never terminate.
+function ttn(itensor_network::ITensorNetwork, ortho_center::Vector)
+  return TTN{vertextype(itensor_network)}(itensor_network, ortho_center)
+end
+
 function data_graph_type(G::Type{<:TTN})
   return data_graph_type(fieldtype(G, :itensor_network))
 end
 
 function Base.copy(ψ::TTN)
-  return TTN(copy(ψ.itensor_network), copy(ψ.ortho_center))
+  return ttn(copy(ψ.itensor_network), copy(ψ.ortho_center))
 end
 
 # Field access
@@ -44,36 +48,36 @@ data_graph(ψ::TTN) = data_graph(itensor_network(ψ))
 #
 # Constructor
 #
 
-TTN(tn::ITensorNetwork, args...) = TTN{vertextype(tn)}(tn, args...)
+ttn(tn::ITensorNetwork, args...) = TTN{vertextype(tn)}(tn, args...)
 
 # catch-all for default ElType
-function TTN(g::AbstractGraph, args...; kwargs...)
-  return TTN(Float64, g, args...; kwargs...)
+function ttn(g::AbstractGraph, args...; kwargs...)
+  return ttn(Float64, g, args...; kwargs...)
 end
 
-function TTN(eltype::Type{<:Number}, graph::AbstractGraph, args...; kwargs...)
+function ttn(eltype::Type{<:Number}, graph::AbstractGraph, args...; kwargs...)
   itensor_network = ITensorNetwork(eltype, graph; kwargs...)
-  return TTN(itensor_network, args...)
+  return ttn(itensor_network, args...)
 end
 
 # construct from given state (map)
-function TTN(::Type{ElT}, is::AbstractIndsNetwork, initstate, args...) where {ElT<:Number}
+function ttn(::Type{ElT}, is::AbstractIndsNetwork, initstate, args...) where {ElT<:Number}
   itensor_network = ITensorNetwork(ElT, is, initstate)
-  return TTN(itensor_network, args...)
+  return ttn(itensor_network, args...)
 end
 
 # Constructor from a collection of ITensors.
 # TODO: Support other collections like `Dictionary`,
 # interface for custom vertex names.
-function TTN(ts::ITensorCollection)
-  return TTN(ITensorNetwork(ts))
+function ttn(ts::ITensorCollection)
+  return ttn(ITensorNetwork(ts))
 end
 
 # TODO: Implement `random_circuit_ttn` for non-trivial
 # bond dimensions and correlations.
 # TODO: Implement random_ttn for QN-Index
 function random_ttn(args...; kwargs...)
-  T = TTN(args...; kwargs...)
+  T = ttn(args...; kwargs...)
   randn!.(vertex_data(T))
   normalize!.(vertex_data(T))
   return T
@@ -91,14 +95,14 @@ function random_mps(
   else
     randomMPS(external_inds, states; linkdims=internal_inds_space)
   end
-  return TTN([tn_mps[v] for v in eachindex(tn_mps)])
+  return ttn([tn_mps[v] for v in eachindex(tn_mps)])
 end
 
 #
 # Construction from operator (map)
 #
 
-function TTN(
+function ttn(
   ::Type{ElT},
   sites_map::Pair{<:AbstractIndsNetwork,<:AbstractIndsNetwork},
   ops::Dictionary;
@@ -110,7 +114,7 @@ function TTN(
   for v in vertices(sites)
     os *= Op(ops[v], v)
   end
-  T = TTN(ElT, os, sites; kwargs...)
+  T = ttn(ElT, os, sites; kwargs...)
   # see https://github.com/ITensor/ITensors.jl/issues/526
   lognormT = lognorm(T)
   T /= exp(lognormT / N) # TODO: fix broadcasting for in-place assignment
@@ -119,7 +123,7 @@ function TTN(
   return T
 end
 
-function TTN(
+function ttn(
   ::Type{ElT},
   sites_map::Pair{<:AbstractIndsNetwork,<:AbstractIndsNetwork},
   fops::Function;
@@ -127,10 +131,10 @@ function TTN(
 ) where {ElT<:Number}
   sites = first(sites_map) # TODO: Use the sites_map
   ops = Dictionary(vertices(sites), map(v -> fops(v), vertices(sites)))
-  return TTN(ElT, sites, ops; kwargs...)
+  return ttn(ElT, sites, ops; kwargs...)
 end
 
-function TTN(
+function ttn(
   ::Type{ElT},
   sites_map::Pair{<:AbstractIndsNetwork,<:AbstractIndsNetwork},
   op::String;
@@ -138,7 +142,7 @@ function TTN(
 ) where {ElT<:Number}
   sites = first(sites_map) # TODO: Use the sites_map
   ops = Dictionary(vertices(sites), fill(op, nv(sites)))
-  return TTN(ElT, sites, ops; kwargs...)
+  return ttn(ElT, sites, ops; kwargs...)
 end
 
 # Special constructors
@@ -156,25 +160,8 @@ function mps(external_inds::Vector{<:Vector{<:Index}}; states)
   tn = insert_missing_internal_inds(
     tn, edges(g); internal_inds_space=trivial_space(indtype(external_inds))
   )
-  return TTN(tn)
-end
-
-## function mps(external_inds::Vector{<:Index}; states)
-##   is = path_indsnetwork(external_inds)
-##   tn = TTN(underlying_graph(is))
-##   tn = insert_missing_internal_inds(tn, trivial_space(indtype(is)))
-##   for v in vertices(tn)
-##     @show v
-##     @show tn[v]
-##     tn[v] *= state(only(is[v]), states(v))
-##     @show tn[v]
-##   end
-##   return tn
-## end
-
-## function productTTN(args...; kwargs...)
-##   return TTN(args...; link_space=1, kwargs...)
-## end
+  return ttn(tn)
+end
 
 #
 # Utility
diff --git a/test/test_additensornetworks.jl b/test/test_additensornetworks.jl
index c0c21d3f..f8bdde47 100644
--- a/test/test_additensornetworks.jl
+++ b/test/test_additensornetworks.jl
@@ -1,7 +1,7 @@
 @eval module $(gensym())
 using Graphs: rem_edge!, vertices
 using NamedGraphs: NamedEdge, hexagonal_lattice_graph, named_grid
-using ITensorNetworks: ITensorNetwork, inner_network, randomITensorNetwork, siteinds
+using ITensorNetworks: ITensorNetwork, inner_network, random_itensornetwork, siteinds
 using ITensors: ITensors, apply, op
 using Random: Random
 using Test: @test, @testset
@@ -32,8 +32,8 @@ using Test: @test, @testset
   rem_edge!(s2, NamedEdge((1, 1) => (1, 2)))
 
   v = rand(vertices(g))
-  ψ1 = randomITensorNetwork(s1; link_space=χ)
-  ψ2 = randomITensorNetwork(s2; link_space=χ)
+  ψ1 = random_itensornetwork(s1; link_space=χ)
+  ψ2 = random_itensornetwork(s2; link_space=χ)
 
   ψ12 = ψ1 + ψ2
 
diff --git a/test/test_apply.jl b/test/test_apply.jl
index f129d6d2..01a82b7c 100644
--- a/test/test_apply.jl
+++ b/test/test_apply.jl
@@ -9,7 +9,7 @@ using ITensorNetworks:
   contract_inner,
   environment,
   norm_network,
-  randomITensorNetwork,
+  random_itensornetwork,
   siteinds,
   update
 using ITensors: ITensors
@@ -25,7 +25,7 @@ using Test: @test, @testset
   g = named_grid(g_dims)
   s = siteinds("S=1/2", g)
   χ = 2
-  ψ = randomITensorNetwork(s; link_space=χ)
+  ψ = random_itensornetwork(s; link_space=χ)
   v1, v2 = (2, 2), (1, 2)
 
   ψψ = norm_network(ψ)
diff --git a/test/test_belief_propagation.jl b/test/test_belief_propagation.jl
index 2ba83ce1..124d2635 100644
--- a/test/test_belief_propagation.jl
+++ b/test/test_belief_propagation.jl
@@ -16,7 +16,7 @@ using ITensorNetworks:
   flatten_networks,
   ising_network,
   linkinds_combiners,
-  randomITensorNetwork,
+  random_itensornetwork,
   siteinds,
   split_index,
   tensornetwork,
@@ -40,7 +40,7 @@ ITensors.disable_warn_order()
   s = siteinds("S=1/2", g)
   χ = 4
   Random.seed!(1234)
-  ψ = randomITensorNetwork(s; link_space=χ)
+  ψ = random_itensornetwork(s; link_space=χ)
 
   ψψ = ψ ⊗ prime(dag(ψ); sites=[])
 
@@ -70,7 +70,7 @@ ITensors.disable_warn_order()
   s = siteinds("S=1/2", g)
   χ = 2
   Random.seed!(1564)
-  ψ = randomITensorNetwork(s; link_space=χ)
+  ψ = random_itensornetwork(s; link_space=χ)
 
   ψψ = ψ ⊗ prime(dag(ψ); sites=[])
 
@@ -117,7 +117,7 @@ ITensors.disable_warn_order()
   s = siteinds("S=1/2", g)
   vs = [(2, 2), (2, 3)]
   χ = 3
-  ψ = randomITensorNetwork(s; link_space=χ)
+  ψ = random_itensornetwork(s; link_space=χ)
   ψψ = ψ ⊗ prime(dag(ψ); sites=[])
 
   bpc = BeliefPropagationCache(ψψ, group(v -> v[1], vertices(ψψ)))
@@ -141,7 +141,7 @@ ITensors.disable_warn_order()
   g = named_grid(g_dims)
   s = siteinds("S=1/2", g)
   χ = 2
-  ψ = randomITensorNetwork(s; link_space=χ)
+  ψ = random_itensornetwork(s; link_space=χ)
   v = (2, 2)
 
   ψψ = flatten_networks(ψ, dag(ψ); combine_linkinds=false, map_bra_linkinds=prime)
diff --git a/test/test_binary_tree_partition.jl b/test/test_binary_tree_partition.jl
index 745072b8..5cc30f5d 100644
--- a/test/test_binary_tree_partition.jl
+++ b/test/test_binary_tree_partition.jl
@@ -16,7 +16,7 @@ using ITensorNetworks:
   ITensorNetwork,
   binary_tree_structure,
   path_graph_structure,
-  randomITensorNetwork
+  random_itensornetwork
 using NamedGraphs: NamedEdge, named_grid, post_order_dfs_vertices
 using OMEinsumContractionOrders: OMEinsumContractionOrders
 using Test: @test, @testset
@@ -56,7 +56,7 @@ end
 @testset "test _binary_tree_partition_inds of a 2D network" begin
   N = (3, 3, 3)
   linkdim = 2
-  network = randomITensorNetwork(IndsNetwork(named_grid(N)); link_space=linkdim)
+  network = random_itensornetwork(IndsNetwork(named_grid(N)); link_space=linkdim)
   tn = Array{ITensor,length(N)}(undef, N...)
   for v in vertices(network)
     tn[v...] = network[v...]
diff --git a/test/test_contract_deltas.jl b/test/test_contract_deltas.jl
index 36eddfe0..ebd6f6b2 100644
--- a/test/test_contract_deltas.jl
+++ b/test/test_contract_deltas.jl
@@ -11,7 +11,7 @@ using ITensorNetworks:
   IndsNetwork,
   ITensorNetwork,
   path_graph_structure,
-  randomITensorNetwork
+  random_itensornetwork
 using NamedGraphs: leaf_vertices, named_grid
 using Test: @test, @testset
 
@@ -38,7 +38,7 @@ end
 @testset "test _contract_deltas over partition" begin
   N = (3, 3, 3)
   linkdim = 2
-  network = randomITensorNetwork(IndsNetwork(named_grid(N)); link_space=linkdim)
+  network = random_itensornetwork(IndsNetwork(named_grid(N)); link_space=linkdim)
   tn = Array{ITensor,length(N)}(undef, N...)
   for v in vertices(network)
     tn[v...] = network[v...]
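
The renames in this change are mechanical: `randomITensorNetwork` becomes `random_itensornetwork` and the `TTN(...)` constructor calls become `ttn(...)`, with call signatures unchanged. A minimal usage sketch of the new names, mirroring the updated tests (the graph and site-index helpers are the same ones the tests import from NamedGraphs and ITensorNetworks):

    using ITensorNetworks: random_itensornetwork, siteinds, ttn
    using NamedGraphs: named_comb_tree, named_grid

    # Random tensor network on a 2x2 grid with bond dimension 2;
    # previously `randomITensorNetwork(s; link_space=2)`.
    s = siteinds("S=1/2", named_grid((2, 2)))
    ψ = random_itensornetwork(s; link_space=2)

    # Tree tensor network product state on a comb tree;
    # previously `TTN(s_tree, v -> "Up")`.
    s_tree = siteinds("S=1/2", named_comb_tree((3, 2)))
    ϕ = ttn(s_tree, v -> "Up")
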
diff --git a/test/test_contraction_sequence.jl b/test/test_contraction_sequence.jl
index 26ff3e79..9e5d9d68 100644
--- a/test/test_contraction_sequence.jl
+++ b/test/test_contraction_sequence.jl
@@ -1,7 +1,7 @@
 @eval module $(gensym())
 using EinExprs: Exhaustive, Greedy, HyPar
 using ITensorNetworks:
-  contraction_sequence, norm_sqr_network, randomITensorNetwork, siteinds
+  contraction_sequence, norm_sqr_network, random_itensornetwork, siteinds
 using ITensors: ITensors, contract
 using NamedGraphs: named_grid
 using OMEinsumContractionOrders: OMEinsumContractionOrders
@@ -15,7 +15,7 @@ Random.seed!(1234)
   g = named_grid(dims)
   s = siteinds("S=1/2", g)
   χ = 10
-  ψ = randomITensorNetwork(s; link_space=χ)
+  ψ = random_itensornetwork(s; link_space=χ)
   tn = norm_sqr_network(ψ)
   seq_optimal = contraction_sequence(tn; alg="optimal")
   res_optimal = contract(tn; sequence=seq_optimal)[]
diff --git a/test/test_contraction_sequence_to_graph.jl b/test/test_contraction_sequence_to_graph.jl
index 2e21c2a4..cef57410 100644
--- a/test/test_contraction_sequence_to_graph.jl
+++ b/test/test_contraction_sequence_to_graph.jl
@@ -10,7 +10,7 @@ using ITensorNetworks:
   distance_to_leaf,
   flatten_networks,
   leaf_vertices,
-  randomITensorNetwork,
+  random_itensornetwork,
   siteinds
 using Test: @test, @testset
 using NamedGraphs: is_leaf, leaf_vertices, named_grid
@@ -21,7 +21,7 @@ using NamedGraphs: is_leaf, leaf_vertices, named_grid
   g = named_grid(dims)
   s = siteinds("S=1/2", g)
 
-  ψ = randomITensorNetwork(s; link_space=2)
+  ψ = random_itensornetwork(s; link_space=2)
 
   ψψ = flatten_networks(ψ, ψ)
   seq = contraction_sequence(ψψ)
diff --git a/test/test_forms.jl b/test/test_forms.jl
index 75dfd5e8..d30cde40 100644
--- a/test/test_forms.jl
+++ b/test/test_forms.jl
@@ -15,7 +15,7 @@ using ITensorNetworks:
   ket_network,
   ket_vertex,
   operator_network,
-  randomITensorNetwork,
+  random_itensornetwork,
   siteinds,
   tensornetwork,
   union_all_inds,
@@ -32,9 +32,9 @@ using Random: Random
   s_operator = union_all_inds(s_bra, s_ket)
   χ, D = 2, 3
   Random.seed!(1234)
-  ψket = randomITensorNetwork(s_ket; link_space=χ)
-  ψbra = randomITensorNetwork(s_bra; link_space=χ)
-  A = randomITensorNetwork(s_operator; link_space=D)
+  ψket = random_itensornetwork(s_ket; link_space=χ)
+  ψbra = random_itensornetwork(s_bra; link_space=χ)
+  A = random_itensornetwork(s_operator; link_space=D)
 
   blf = BilinearFormNetwork(A, ψbra, ψket)
   @test nv(blf) == nv(ψket) + nv(ψbra) + nv(A)
diff --git a/test/test_gauging.jl b/test/test_gauging.jl
index ce7c8867..97938847 100644
--- a/test/test_gauging.jl
+++ b/test/test_gauging.jl
@@ -7,7 +7,7 @@ using ITensorNetworks:
   contract_inner,
   gauge_error,
   messages,
-  randomITensorNetwork,
+  random_itensornetwork,
   siteinds,
   update
 using ITensors: diagITensor, inds
@@ -25,7 +25,7 @@ using Test: @test, @testset
   χ = 6
 
   Random.seed!(5467)
-  ψ = randomITensorNetwork(s; link_space=χ)
+  ψ = random_itensornetwork(s; link_space=χ)
 
   # Move directly to vidal gauge
   ψ_vidal = VidalITensorNetwork(ψ)
diff --git a/test/test_itensornetwork.jl b/test/test_itensornetwork.jl
index b4200e46..c6d56b84 100644
--- a/test/test_itensornetwork.jl
+++ b/test/test_itensornetwork.jl
@@ -41,7 +41,7 @@ using ITensorNetworks:
   internalinds,
   linkinds,
   orthogonalize,
-  randomITensorNetwork,
+  random_itensornetwork,
   siteinds
 using LinearAlgebra: factorize
 using NamedGraphs: NamedEdge, incident_edges, named_comb_tree, named_grid
@@ -150,7 +150,7 @@ using Test: @test, @test_broken, @testset
     @test has_vertex(tn, ((2, 2), 2))
   end
 
-  @testset "Custom element type" for eltype in (Float32, Float64, ComplexF32, ComplexF64),
+  @testset "Custom element type" for elt in (Float32, Float64, ComplexF32, ComplexF64),
     link_space in (nothing, 3),
     g in (
       grid((4,)),
@@ -160,33 +160,33 @@ using Test: @test, @test_broken, @testset
   )
 
     ψ = ITensorNetwork(g; link_space) do v, inds...
-      return itensor(randn(eltype, dims(inds)...), inds...)
+      return itensor(randn(elt, dims(inds)...), inds...)
     end
-    @test Base.eltype(ψ[first(vertices(ψ))]) == eltype
+    @test eltype(ψ[first(vertices(ψ))]) == elt
     ψ = ITensorNetwork(g; link_space) do v, inds...
       return itensor(randn(dims(inds)...), inds...)
    end
-    @test Base.eltype(ψ[first(vertices(ψ))]) == Float64
-    ψ = randomITensorNetwork(eltype, g; link_space)
-    @test Base.eltype(ψ[first(vertices(ψ))]) == eltype
-    ψ = randomITensorNetwork(g; link_space)
-    @test Base.eltype(ψ[first(vertices(ψ))]) == Float64
-    ψ = ITensorNetwork(eltype, undef, g; link_space)
-    @test Base.eltype(ψ[first(vertices(ψ))]) == eltype
+    @test eltype(ψ[first(vertices(ψ))]) == Float64
+    ψ = random_itensornetwork(elt, g; link_space)
+    @test eltype(ψ[first(vertices(ψ))]) == elt
+    ψ = random_itensornetwork(g; link_space)
+    @test eltype(ψ[first(vertices(ψ))]) == Float64
+    ψ = ITensorNetwork(elt, undef, g; link_space)
+    @test eltype(ψ[first(vertices(ψ))]) == elt
     ψ = ITensorNetwork(undef, g)
-    @test Base.eltype(ψ[first(vertices(ψ))]) == Float64
+    @test eltype(ψ[first(vertices(ψ))]) == Float64
   end
 
-  @testset "randomITensorNetwork with custom distributions" begin
+  @testset "random_itensornetwork with custom distributions" begin
     distribution = Uniform(-1.0, 1.0)
-    tn = randomITensorNetwork(distribution, named_grid(4); link_space=2)
+    tn = random_itensornetwork(distribution, named_grid(4); link_space=2)
     # Note: distributions in package `Distributions` currently doesn't support customized
     # eltype, and all elements have type `Float64`
-    @test Base.eltype(tn[first(vertices(tn))]) == Float64
+    @test eltype(tn[first(vertices(tn))]) == Float64
   end
 
   @testset "orthogonalize" begin
-    tn = randomITensorNetwork(named_grid(4); link_space=2)
+    tn = random_itensornetwork(named_grid(4); link_space=2)
     Z = contract(inner_network(tn, tn))[]
 
     tn_ortho = factorize(tn, 4 => 3)
@@ -266,18 +266,18 @@ using Test: @test, @test_broken, @testset
     @test length(internalinds(ψ)) == length(edges(g))
   end
 
-  @testset "ElType conversion, $new_eltype" for new_eltype in (Float32, ComplexF64)
+  @testset "eltype conversion, $new_eltype" for new_eltype in (Float32, ComplexF64)
     dims = (2, 2)
     g = named_grid(dims)
     s = siteinds("S=1/2", g)
-    ψ = randomITensorNetwork(s; link_space=2)
+    ψ = random_itensornetwork(s; link_space=2)
     @test ITensors.scalartype(ψ) == Float64
 
     ϕ = ITensors.convert_leaf_eltype(new_eltype, ψ)
     @test ITensors.scalartype(ϕ) == new_eltype
   end
 
-  @testset "Construction from state map" for ElT in (Float32, ComplexF64)
+  @testset "Construction from state map" for elt in (Float32, ComplexF64)
     dims = (2, 2)
     g = named_grid(dims)
     s = siteinds("S=1/2", g)
@@ -291,13 +291,13 @@ using Test: @test, @test_broken, @testset
     @test abs(t[si => "↑", [b => end for b in bi]...]) == 1.0 # insert_links introduces extra signs through factorization...
     @test t[si => "↓", [b => end for b in bi]...] == 0.0
 
-    ϕ = ITensorNetwork(ElT, s, state_map)
+    ϕ = ITensorNetwork(elt, s, state_map)
     t = ϕ[2, 2]
     si = only(siteinds(ϕ, (2, 2)))
     bi = map(e -> only(linkinds(ϕ, e)), incident_edges(ϕ, (2, 2)))
-    @test eltype(t) == ElT
-    @test abs(t[si => "↑", [b => end for b in bi]...]) == convert(ElT, 1.0) # insert_links introduces extra signs through factorization...
-    @test t[si => "↓", [b => end for b in bi]...] == convert(ElT, 0.0)
+    @test eltype(t) == elt
+    @test abs(t[si => "↑", [b => end for b in bi]...]) == convert(elt, 1.0) # insert_links introduces extra signs through factorization...
+    @test t[si => "↓", [b => end for b in bi]...] == convert(elt, 0.0)
   end
 
   @testset "Priming and tagging" begin
@@ -306,7 +306,7 @@ using Test: @test, @test_broken, @testset
     tooth_lengths = fill(2, 3)
     c = named_comb_tree(tooth_lengths)
     is = siteinds("S=1/2", c)
-    tn = randomITensorNetwork(is; link_space=3)
+    tn = random_itensornetwork(is; link_space=3)
     @test_broken swapprime(tn, 0, 2)
   end
 end
diff --git a/test/test_opsum_to_ttn.jl b/test/test_opsum_to_ttn.jl
index 0a3952b5..5d6c394f 100644
--- a/test/test_opsum_to_ttn.jl
+++ b/test/test_opsum_to_ttn.jl
@@ -16,7 +16,7 @@ using ITensors:
 using ITensors.ITensorMPS: MPO
 using ITensors.NDTensors: matrix
 using ITensorGaussianMPS: hopping_hamiltonian
-using ITensorNetworks: ITensorNetworks, OpSum, TTN, relabel_sites, siteinds
+using ITensorNetworks: ITensorNetworks, OpSum, ttn, relabel_sites, siteinds
 using KrylovKit: eigsolve
 using LinearAlgebra: eigvals, norm
 using NamedGraphs: leaf_vertices, named_comb_tree, named_grid, post_order_dfs_vertices
@@ -61,7 +61,7 @@ end
 
   @testset "Svd approach" for root_vertex in leaf_vertices(is)
     # get TTN Hamiltonian directly
-    Hsvd = TTN(H, is; root_vertex=root_vertex, cutoff=1e-10)
+    Hsvd = ttn(H, is; root_vertex=root_vertex, cutoff=1e-10)
     # get corresponding MPO Hamiltonian
     Hline = MPO(relabel_sites(H, vmap), sites)
     # compare resulting dense Hamiltonians
@@ -72,7 +72,7 @@ end
     @test Tttno ≈ Tmpo rtol = 1e-6
 
     # this breaks for longer range interactions
-    Hsvd_lr = TTN(Hlr, is; root_vertex=root_vertex, algorithm="svd", cutoff=1e-10)
+    Hsvd_lr = ttn(Hlr, is; root_vertex=root_vertex, algorithm="svd", cutoff=1e-10)
     Hline_lr = MPO(relabel_sites(Hlr, vmap), sites)
     @disable_warn_order begin
       Tttno_lr = prod(Hline_lr)
@@ -96,9 +96,9 @@ end
     os1 += 1.0, "Sx", (1, 1)
     os2 = OpSum()
     os2 += 1.0, "Sy", (1, 1)
-    H1 = TTN(os1, s)
-    H2 = TTN(os2, s)
-    H3 = TTN(os1 + os2, s)
+    H1 = ttn(os1, s)
+    H2 = ttn(os2, s)
+    H3 = ttn(os1 + os2, s)
     @test H1 + H2 ≈ H3 rtol = 1e-6
 
     if auto_fermion_enabled
@@ -138,7 +138,7 @@ end
 
   @testset "Svd approach" for root_vertex in leaf_vertices(is)
     # get TTN Hamiltonian directly
-    Hsvd = TTN(H, is; root_vertex=root_vertex, cutoff=1e-10)
+    Hsvd = ttn(H, is; root_vertex=root_vertex, cutoff=1e-10)
     # get corresponding MPO Hamiltonian
     Hline = MPO(relabel_sites(H, vmap), sites)
     # compare resulting sparse Hamiltonians
@@ -150,7 +150,7 @@ end
     @test Tttno ≈ Tmpo rtol = 1e-6
 
     # this breaks for longer range interactions ###not anymore
-    Hsvd_lr = TTN(Hlr, is; root_vertex=root_vertex, algorithm="svd", cutoff=1e-10)
+    Hsvd_lr = ttn(Hlr, is; root_vertex=root_vertex, algorithm="svd", cutoff=1e-10)
     Hline_lr = MPO(relabel_sites(Hlr, vmap), sites)
     @disable_warn_order begin
       Tttno_lr = prod(Hline_lr)
@@ -182,7 +182,7 @@ end
 
   @testset "Svd approach" for root_vertex in leaf_vertices(is)
     # get TTN Hamiltonian directly
-    Hsvd = TTN(H, is; root_vertex=root_vertex, cutoff=1e-10)
+    Hsvd = ttn(H, is; root_vertex=root_vertex, cutoff=1e-10)
     # get corresponding MPO Hamiltonian
     sites = [only(is[v]) for v in reverse(post_order_dfs_vertices(c, root_vertex))]
     vmap = Dictionary(reverse(post_order_dfs_vertices(c, root_vertex)), 1:length(sites))
@@ -252,7 +252,7 @@ end
 
   @testset "Svd approach" for root_vertex in leaf_vertices(is)
     # get TTN Hamiltonian directly
-    Hsvd = TTN(H, is_missing_site; root_vertex=root_vertex, cutoff=1e-10)
+    Hsvd = ttn(H, is_missing_site; root_vertex=root_vertex, cutoff=1e-10)
     # get corresponding MPO Hamiltonian
     Hline = MPO(relabel_sites(H, vmap), sites)
 
@@ -263,7 +263,7 @@ end
     end
     @test Tttno ≈ Tmpo rtol = 1e-6
 
-    Hsvd_lr = TTN(
+    Hsvd_lr = ttn(
       Hlr, is_missing_site; root_vertex=root_vertex, algorithm="svd", cutoff=1e-10
     )
     Hline_lr = MPO(relabel_sites(Hlr, vmap), sites)
diff --git a/test/test_tno.jl b/test/test_tno.jl
index a9868eae..f42ca585 100644
--- a/test/test_tno.jl
+++ b/test/test_tno.jl
@@ -8,7 +8,7 @@ using ITensorNetworks:
   gate_group_to_tno,
   get_tnos,
   ising,
-  randomITensorNetwork,
+  random_itensornetwork,
   siteinds
 using ITensors: ITensor, noprime
 using NamedGraphs: named_grid
@@ -32,7 +32,7 @@ using Test: @test, @testset
   #Construct a single tno which represents prod(gates)
   single_tno = gate_group_to_tno(s, gates)
 
-  ψ = randomITensorNetwork(s; link_space=2)
+  ψ = random_itensornetwork(s; link_space=2)
 
   ψ_gated = copy(ψ)
   for gate in gates
diff --git a/test/test_treetensornetworks/test_expect.jl b/test/test_treetensornetworks/test_expect.jl
index 82db353a..3acbd83b 100644
--- a/test/test_treetensornetworks/test_expect.jl
+++ b/test/test_treetensornetworks/test_expect.jl
@@ -1,7 +1,7 @@
 @eval module $(gensym())
 using Graphs: vertices
 using ITensors.ITensorMPS: MPS
-using ITensorNetworks: TTN, expect, random_mps, siteinds
+using ITensorNetworks: ttn, expect, random_mps, siteinds
 using NamedGraphs: named_comb_tree
 using Test: @test, @testset
 
@@ -27,7 +27,7 @@ end
     magnetization[v] = isodd(i) ? 0.5 : -0.5
   end
   states = v -> d[v]
-  state = TTN(s, states)
+  state = ttn(s, states)
   res = expect("Sz", state)
   @test all([isapprox(res[v], magnetization[v]; atol=1e-8) for v in vertices(s)])
 end
diff --git a/test/test_treetensornetworks/test_position.jl b/test/test_treetensornetworks/test_position.jl
index f1c1e0a8..096141fd 100644
--- a/test/test_treetensornetworks/test_position.jl
+++ b/test/test_treetensornetworks/test_position.jl
@@ -1,7 +1,7 @@
 @eval module $(gensym())
 using Graphs: vertices
 using ITensors: ITensors
-using ITensorNetworks: ITensorNetworks, ProjTTN, TTN, environments, position, siteinds
+using ITensorNetworks: ITensorNetworks, ProjTTN, ttn, environments, position, siteinds
 using NamedGraphs: named_comb_tree
 using Test
 
@@ -23,14 +23,14 @@ using Test
 
   os = ITensorNetworks.heisenberg(c)
 
-  H = TTN(os, s)
+  H = ttn(os, s)
 
   d = Dict()
   for (i, v) in enumerate(vertices(s))
     d[v] = isodd(i) ? "Up" : "Dn"
   end
   states = v -> d[v]
-  psi = TTN(s, states)
+  psi = ttn(s, states)
 
   # actual test, verifies that position is out of place
   vs = vertices(s)
diff --git a/test/test_treetensornetworks/test_solvers/test_contract.jl b/test/test_treetensornetworks/test_solvers/test_contract.jl
index c78b2c64..9e208150 100644
--- a/test/test_treetensornetworks/test_solvers/test_contract.jl
+++ b/test/test_treetensornetworks/test_solvers/test_contract.jl
@@ -5,7 +5,7 @@ using ITensorNetworks:
   OpSum,
   ProjOuterProdTTN,
   ProjTTNSum,
-  TTN,
+  ttn,
   TreeTensorNetwork,
   apply,
   contract,
@@ -105,7 +105,7 @@ end
   psi = normalize(random_ttn(s; link_space=8))
 
   os = ITensorNetworks.heisenberg(c; J1=1, J2=1)
-  H = TTN(os, s)
+  H = ttn(os, s)
 
   # Test basic usage with default parameters
   Hpsi = apply(H, psi; alg="fit", init=psi, nsweeps=1, cutoff=eps())
@@ -120,7 +120,7 @@ end
   # BLAS.axpy-like test
   os_id = OpSum()
   os_id += -1, "Id", vertices(s)[1], "Id", vertices(s)[1]
-  minus_identity = TTN(os_id, s)
+  minus_identity = ttn(os_id, s)
   Hpsi = ITensorNetworks.sum_apply(
     [(H, psi), (minus_identity, psi)]; alg="fit", init=psi, nsweeps=1
   )
diff --git a/test/test_treetensornetworks/test_solvers/test_dmrg.jl b/test/test_treetensornetworks/test_solvers/test_dmrg.jl
index 00eb181b..828b168e 100644
--- a/test/test_treetensornetworks/test_solvers/test_dmrg.jl
+++ b/test/test_treetensornetworks/test_solvers/test_dmrg.jl
@@ -7,7 +7,7 @@ using ITensors.ITensorMPS: MPO, MPS, randomMPS
 using ITensorNetworks:
   ITensorNetworks,
   OpSum,
-  TTN,
+  ttn,
   apply,
   dmrg,
   inner,
@@ -179,7 +179,7 @@ end
 
   os = ITensorNetworks.heisenberg(c)
 
-  H = TTN(os, s)
+  H = ttn(os, s)
 
   # make init_state
   d = Dict()
@@ -187,7 +187,7 @@ end
     d[v] = isodd(i) ? "Up" : "Dn"
   end
   states = v -> d[v]
-  psi = TTN(s, states)
+  psi = ttn(s, states)
 
   # psi = random_ttn(s; link_space=20) #FIXME: random_ttn broken for QN conserving case
 
@@ -243,14 +243,14 @@ end
   ITensors.enable_auto_fermion()
 
   # now get auto-fermion results
-  H = TTN(os, s)
+  H = ttn(os, s)
 
   # make init_state
   d = Dict()
   for (i, v) in enumerate(vertices(s))
     d[v] = isodd(i) ? "Up" : "Dn"
   end
   states = v -> d[v]
-  psi = TTN(s, states)
+  psi = ttn(s, states)
   psi = dmrg(
     H, psi; nsweeps, maxdim, cutoff, nsites, updater_kwargs=(; krylovdim=3, maxiter=1)
   )
@@ -277,7 +277,7 @@ end
   c = named_comb_tree((3, 2))
   s = siteinds("S=1/2", c)
   os = ITensorNetworks.heisenberg(c)
-  H = TTN(os, s)
+  H = ttn(os, s)
   psi = random_ttn(s; link_space=5)
 
   psi = dmrg(H, psi; nsweeps, maxdim, nsites)
diff --git a/test/test_treetensornetworks/test_solvers/test_dmrg_x.jl b/test/test_treetensornetworks/test_solvers/test_dmrg_x.jl
index 505a4775..309144d1 100644
--- a/test/test_treetensornetworks/test_solvers/test_dmrg_x.jl
+++ b/test/test_treetensornetworks/test_solvers/test_dmrg_x.jl
@@ -3,7 +3,7 @@ using Graphs: nv
 using ITensorNetworks:
   ITensorNetworks,
   OpSum,
-  TTN,
+  ttn,
   apply,
   contract,
   dmrg_x,
@@ -60,11 +60,11 @@ end
 
   # Random fields h ∈ [-W, W]
   h = W * (2 * rand(nv(c)) .- 1)
-  H = TTN(ITensorNetworks.heisenberg(c; h), s)
+  H = ttn(ITensorNetworks.heisenberg(c; h), s)
 
-  # TODO: Use `TTN(s; states=v -> rand(["↑", "↓"]))` or
+  # TODO: Use `ttn(s; states=v -> rand(["↑", "↓"]))` or
   # `ttns(s; states=v -> rand(["↑", "↓"]))`
-  ψ = normalize(TTN(s, v -> rand(["↑", "↓"])))
+  ψ = normalize(ttn(s, v -> rand(["↑", "↓"])))
 
   dmrg_x_kwargs = (nsweeps=20, normalize=true, maxdim=20, cutoff=1e-10, outputlevel=0)
 
diff --git a/test/test_treetensornetworks/test_solvers/test_tdvp.jl b/test/test_treetensornetworks/test_solvers/test_tdvp.jl
index 1bc39a19..9b05cd95 100644
--- a/test/test_treetensornetworks/test_solvers/test_tdvp.jl
+++ b/test/test_treetensornetworks/test_solvers/test_tdvp.jl
@@ -4,7 +4,7 @@ using ITensors: ITensor, contract, dag, inner, noprime, normalize, prime, scalar
 using ITensorNetworks:
   ITensorNetworks,
   OpSum,
-  TTN,
+  ttn,
   apply,
   expect,
   mpo,
@@ -420,7 +420,7 @@ end
 
     os = ITensorNetworks.heisenberg(c)
 
-    H = TTN(os, s)
+    H = ttn(os, s)
 
     ψ0 = normalize(random_ttn(s))
 
@@ -460,8 +460,8 @@ end
       os2 += "Sz", src(e), "Sz", dst(e)
     end
 
-    H1 = TTN(os1, s)
-    H2 = TTN(os2, s)
+    H1 = ttn(os1, s)
+    H2 = ttn(os2, s)
     Hs = [H1, H2]
 
     ψ0 = normalize(random_ttn(s; link_space=10))
@@ -496,12 +496,12 @@ end
     s = siteinds("S=1/2", c)
 
     os = ITensorNetworks.heisenberg(c)
-    H = TTN(os, s)
+    H = ttn(os, s)
     HM = contract(H)
 
     Ut = exp(-im * tau * HM)
 
-    state = TTN(ComplexF64, s, v -> iseven(sum(isodd.(v))) ? "Up" : "Dn")
+    state = ttn(ComplexF64, s, v -> iseven(sum(isodd.(v))) ? "Up" : "Dn")
     statex = contract(state)
 
     Sz_tdvp = Float64[]
@@ -544,7 +544,7 @@ end
     s = siteinds("S=1/2", c)
 
    os = ITensorNetworks.heisenberg(c)
-    H = TTN(os, s)
+    H = ttn(os, s)
 
     gates = ITensor[]
     for e in edges(c)
@@ -559,7 +559,7 @@ end
     end
     append!(gates, reverse(gates))
 
-    state = TTN(s, v -> iseven(sum(isodd.(v))) ? "Up" : "Dn")
+    state = ttn(s, v -> iseven(sum(isodd.(v))) ? "Up" : "Dn")
     phi = copy(state)
     c = (2, 1)
 
@@ -598,7 +598,7 @@ end
     # Evolve using TDVP
     #
 
-    phi = TTN(s, v -> iseven(sum(isodd.(v))) ? "Up" : "Dn")
+    phi = ttn(s, v -> iseven(sum(isodd.(v))) ? "Up" : "Dn")
    obs = observer(
       "Sz" => (; state) -> expect("Sz", state; vertices=[c])[c],
       "En" => (; state) -> real(inner(state', H, state)),
@@ -630,7 +630,7 @@ end
     s = siteinds("S=1/2", c)
 
     os = ITensorNetworks.heisenberg(c)
-    H = TTN(os, s)
+    H = ttn(os, s)
 
     state = normalize(random_ttn(s; link_space=2))
 
diff --git a/test/test_treetensornetworks/test_solvers/test_tdvp_time_dependent.jl b/test/test_treetensornetworks/test_solvers/test_tdvp_time_dependent.jl
index 872a6720..6bae3b7c 100644
--- a/test/test_treetensornetworks/test_solvers/test_tdvp_time_dependent.jl
+++ b/test/test_treetensornetworks/test_solvers/test_tdvp_time_dependent.jl
@@ -1,6 +1,6 @@
 @eval module $(gensym())
 using ITensors: contract
-using ITensorNetworks: ITensorNetworks, TimeDependentSum, TTN, mpo, mps, siteinds, tdvp
+using ITensorNetworks: ITensorNetworks, TimeDependentSum, ttn, mpo, mps, siteinds, tdvp
 using OrdinaryDiffEq: Tsit5
 using KrylovKit: exponentiate
 using LinearAlgebra: norm
@@ -193,7 +193,7 @@ end
   ℋ⃗₀ = [ℋ₁₀, ℋ₂₀]
-  H⃗₀ = [TTN(ℋ₀, s) for ℋ₀ in ℋ⃗₀]
+  H⃗₀ = [ttn(ℋ₀, s) for ℋ₀ in ℋ⃗₀]
 
-  ψ₀ = TTN(ComplexF64, s, v -> iseven(sum(isodd.(v))) ? "↑" : "↓")
+  ψ₀ = ttn(ComplexF64, s, v -> iseven(sum(isodd.(v))) ? "↑" : "↓")
 
   ψₜ_ode = tdvp(
     H⃗₀,
diff --git a/test/test_ttno.jl b/test/test_ttno.jl
index f134cf20..4a25a871 100644
--- a/test/test_ttno.jl
+++ b/test/test_ttno.jl
@@ -1,6 +1,6 @@
 @eval module $(gensym())
 using Graphs: vertices
-using ITensorNetworks: TTN, contract, ortho_center, siteinds, union_all_inds
+using ITensorNetworks: ttn, contract, ortho_center, siteinds, union_all_inds
 using ITensors: @disable_warn_order, prime, randomITensor
 using LinearAlgebra: norm
 using NamedGraphs: named_comb_tree
@@ -26,15 +26,15 @@ using Test: @test, @testset
   # create random ITensor with these indices
   O = randomITensor(sites_o...)
   # dense TTN constructor from IndsNetwork
-  @disable_warn_order o1 = TTN(O, is_isp; cutoff)
+  @disable_warn_order o1 = ttn(O, is_isp; cutoff)
   # dense TTN constructor from Vector{Vector{Index}} and NamedDimGraph
-  @disable_warn_order o2 = TTN(O, sites_o, c; vertex_order, cutoff)
+  @disable_warn_order o2 = ttn(O, sites_o, c; vertex_order, cutoff)
   # convert to array with proper index order
   AO = Array(O, sites_o...)
   # dense array constructor from IndsNetwork
-  @disable_warn_order o3 = TTN(AO, is_isp; vertex_order, cutoff)
+  @disable_warn_order o3 = ttn(AO, is_isp; vertex_order, cutoff)
   # dense array constructor from Vector{Vector{Index}} and NamedDimGraph
-  @disable_warn_order o4 = TTN(AO, sites_o, c; vertex_order, cutoff)
+  @disable_warn_order o4 = ttn(AO, sites_o, c; vertex_order, cutoff)
   # see if this actually worked
   root_vertex = only(ortho_center(o1))
   @disable_warn_order begin
diff --git a/test/test_ttns.jl b/test/test_ttns.jl
index 81bc9760..5c7cece6 100644
--- a/test/test_ttns.jl
+++ b/test/test_ttns.jl
@@ -1,7 +1,7 @@
 @eval module $(gensym())
 using DataGraphs: vertex_data
 using Graphs: vertices
-using ITensorNetworks: TTN, contract, ortho_center, siteinds
+using ITensorNetworks: ttn, contract, ortho_center, siteinds
 using ITensors: @disable_warn_order, randomITensor
 using LinearAlgebra: norm
 using NamedGraphs: named_comb_tree
@@ -25,15 +25,15 @@ using Test: @test, @testset
   # create random ITensor with these indices
   S = randomITensor(vertex_data(is)...)
   # dense TTN constructor from IndsNetwork
-  @disable_warn_order s1 = TTN(S, is; cutoff)
+  @disable_warn_order s1 = ttn(S, is; cutoff)
   # dense TTN constructor from Vector{Index} and NamedDimGraph
-  @disable_warn_order s2 = TTN(S, sites_s, c; vertex_order, cutoff)
+  @disable_warn_order s2 = ttn(S, sites_s, c; vertex_order, cutoff)
   # convert to array with proper index order
   @disable_warn_order AS = Array(S, sites_s...)
   # dense array constructor from IndsNetwork
-  @disable_warn_order s3 = TTN(AS, is; vertex_order, cutoff)
+  @disable_warn_order s3 = ttn(AS, is; vertex_order, cutoff)
   # dense array constructor from Vector{Index} and NamedDimGraph
-  @disable_warn_order s4 = TTN(AS, sites_s, c; vertex_order, cutoff)
+  @disable_warn_order s4 = ttn(AS, sites_s, c; vertex_order, cutoff)
   # see if this actually worked
   root_vertex = only(ortho_center(s1))
   @disable_warn_order begin
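
For Hamiltonians the same lowercase constructor applies: `ttn(os, s)` replaces `TTN(os, s)` for `OpSum` conversion. A minimal sketch mirroring the solver tests above (it assumes the unexported `ITensorNetworks.heisenberg` helper those tests use; the sweep parameters are illustrative values):

    using ITensorNetworks: ITensorNetworks, dmrg, random_ttn, siteinds, ttn
    using NamedGraphs: named_comb_tree

    c = named_comb_tree((3, 2))
    s = siteinds("S=1/2", c)

    # Hamiltonian as a tree tensor network; previously `TTN(os, s)`.
    os = ITensorNetworks.heisenberg(c)
    H = ttn(os, s)

    # Random initial state and a few DMRG sweeps, as in the tests.
    psi = random_ttn(s; link_space=5)
    psi = dmrg(H, psi; nsweeps=5, maxdim=20, nsites=2)
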