Skip to content

Commit

Permalink
Named changes
Browse files Browse the repository at this point in the history
  • Loading branch information
mtfishman committed Apr 3, 2024
1 parent c7f5e4b commit a4406da
Show file tree
Hide file tree
Showing 28 changed files with 159 additions and 172 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "ITensorNetworks"
uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
authors = ["Matthew Fishman <[email protected]> and contributors"]
version = "0.5"
version = "0.6"

[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
Expand Down
4 changes: 2 additions & 2 deletions src/approx_itensornetwork/ttn_svd.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ using IterTools: partition
"""
Approximate a `partition` into an output ITensorNetwork
with the binary tree structure defined by `out_tree` by
first transforming the partition into a TTN, then truncating
first transforming the partition into a ttn, then truncating
the ttn using a sequence of SVDs.
"""
function _approx_itensornetwork_ttn_svd!(
Expand All @@ -22,7 +22,7 @@ function _approx_itensornetwork_ttn_svd!(
contraction_sequence_kwargs=contraction_sequence_kwargs,
)
end
truncate_ttn = truncate(TTN(tn); cutoff=cutoff, maxdim=maxdim, root_vertex=root)
truncate_ttn = truncate(ttn(tn); cutoff=cutoff, maxdim=maxdim, root_vertex=root)
out_tn = ITensorNetwork(truncate_ttn)
root_tensor = out_tn[root]
root_norm = norm(root_tensor)
Expand Down
2 changes: 1 addition & 1 deletion src/solvers/contract.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ function sum_contract(
)
any(ns .!= n) &&
throw(DimensionMismatch("Number of sites in different operators ($n) do not match"))
# ToDo: Write test for single-vertex TTN, this implementation has not been tested.
# ToDo: Write test for single-vertex ttn, this implementation has not been tested.
if n == 1
res = 0
for (tn1, tn2) in zip(tn1s, tn2s)
Expand Down
20 changes: 10 additions & 10 deletions src/specialitensornetworks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -101,41 +101,41 @@ end
"""
Build an ITensor network on a graph specified by the inds network s. Bond_dim is given by link_space and entries are randomised (normal distribution, mean 0 std 1)
"""
function randomITensorNetwork(eltype::Type, s::IndsNetwork; link_space=nothing)
function random_itensornetwork(eltype::Type, s::IndsNetwork; link_space=nothing)
return ITensorNetwork(s; link_space) do v, inds...
itensor(randn(eltype, dim(inds)...), inds...)
end
end

function randomITensorNetwork(s::IndsNetwork; link_space=nothing)
return randomITensorNetwork(Float64, s; link_space)
function random_itensornetwork(s::IndsNetwork; link_space=nothing)
return random_itensornetwork(Float64, s; link_space)
end

@traitfn function randomITensorNetwork(
@traitfn function random_itensornetwork(
eltype::Type, g::::IsUnderlyingGraph; link_space=nothing
)
return randomITensorNetwork(eltype, IndsNetwork(g); link_space)
return random_itensornetwork(eltype, IndsNetwork(g); link_space)
end

@traitfn function randomITensorNetwork(g::::IsUnderlyingGraph; link_space=nothing)
return randomITensorNetwork(Float64, IndsNetwork(g); link_space)
@traitfn function random_itensornetwork(g::::IsUnderlyingGraph; link_space=nothing)
return random_itensornetwork(Float64, IndsNetwork(g); link_space)
end

"""
Build an ITensor network on a graph specified by the inds network s.
Bond_dim is given by link_space and entries are randomized.
The random distribution is based on the input argument `distribution`.
"""
function randomITensorNetwork(
function random_itensornetwork(
distribution::Distribution, s::IndsNetwork; link_space=nothing
)
return ITensorNetwork(s; link_space) do v, inds...
itensor(rand(distribution, dim(inds)...), inds...)
end
end

@traitfn function randomITensorNetwork(
@traitfn function random_itensornetwork(
distribution::Distribution, g::::IsUnderlyingGraph; link_space=nothing
)
return randomITensorNetwork(distribution, IndsNetwork(g); link_space)
return random_itensornetwork(distribution, IndsNetwork(g); link_space)
end
2 changes: 1 addition & 1 deletion src/treetensornetworks/abstracttreetensornetwork.jl
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@ function Base.:+(
@assert all-> nv(first(ψs)) == nv(ψ), ψs)

# Output state
ϕ = TTN(siteinds(ψs[1]))
ϕ = ttn(siteinds(ψs[1]))

vs = post_order_dfs_vertices(ϕ, root_vertex)
es = post_order_dfs_edges(ϕ, root_vertex)
Expand Down
38 changes: 19 additions & 19 deletions src/treetensornetworks/opsum_to_ttn.jl
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ function ttn_svd(
link_space[e] = Index(qi...; tags=edge_tag(e), dir=linkdir_ref)
end

H = TTN(sites0) # initialize TTN without the dummy indices added
H = ttn(sites0) # initialize TTN without the dummy indices added
function qnblock(i::Index, q::QN)
for b in 2:(nblocks(i) - 1)
flux(i, Block(b)) == q && return b
Expand Down Expand Up @@ -496,12 +496,12 @@ function sorteachterm(os::OpSum, sites::IndsNetwork{V,<:Index}, root_vertex::V)
end

"""
TTN(os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
TTN(eltype::Type{<:Number}, os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
ttn(os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
ttn(eltype::Type{<:Number}, os::OpSum, sites::IndsNetwork{<:Index}; kwargs...)
Convert an OpSum object `os` to a TreeTensorNetwork, with indices given by `sites`.
"""
function TTN(
function ttn(
os::OpSum,
sites::IndsNetwork;
root_vertex=default_root_vertex(sites),
Expand Down Expand Up @@ -530,37 +530,37 @@ function TTN(
end

function mpo(os::OpSum, external_inds::Vector; kwargs...)
return TTN(os, path_indsnetwork(external_inds); kwargs...)
return ttn(os, path_indsnetwork(external_inds); kwargs...)
end

# Conversion from other formats
function TTN(o::Op, s::IndsNetwork; kwargs...)
return TTN(OpSum{Float64}() + o, s; kwargs...)
function ttn(o::Op, s::IndsNetwork; kwargs...)
return ttn(OpSum{Float64}() + o, s; kwargs...)
end

function TTN(o::Scaled{C,Op}, s::IndsNetwork; kwargs...) where {C}
return TTN(OpSum{C}() + o, s; kwargs...)
function ttn(o::Scaled{C,Op}, s::IndsNetwork; kwargs...) where {C}
return ttn(OpSum{C}() + o, s; kwargs...)
end

function TTN(o::Sum{Op}, s::IndsNetwork; kwargs...)
return TTN(OpSum{Float64}() + o, s; kwargs...)
function ttn(o::Sum{Op}, s::IndsNetwork; kwargs...)
return ttn(OpSum{Float64}() + o, s; kwargs...)
end

function TTN(o::Prod{Op}, s::IndsNetwork; kwargs...)
return TTN(OpSum{Float64}() + o, s; kwargs...)
function ttn(o::Prod{Op}, s::IndsNetwork; kwargs...)
return ttn(OpSum{Float64}() + o, s; kwargs...)
end

function TTN(o::Scaled{C,Prod{Op}}, s::IndsNetwork; kwargs...) where {C}
return TTN(OpSum{C}() + o, s; kwargs...)
function ttn(o::Scaled{C,Prod{Op}}, s::IndsNetwork; kwargs...) where {C}
return ttn(OpSum{C}() + o, s; kwargs...)
end

function TTN(o::Sum{Scaled{C,Op}}, s::IndsNetwork; kwargs...) where {C}
return TTN(OpSum{C}() + o, s; kwargs...)
function ttn(o::Sum{Scaled{C,Op}}, s::IndsNetwork; kwargs...) where {C}
return ttn(OpSum{C}() + o, s; kwargs...)
end

# Catch-all for leaf eltype specification
function TTN(eltype::Type{<:Number}, os, sites::IndsNetwork; kwargs...)
return NDTensors.convert_scalartype(eltype, TTN(os, sites; kwargs...))
function ttn(eltype::Type{<:Number}, os, sites::IndsNetwork; kwargs...)
return NDTensors.convert_scalartype(eltype, ttn(os, sites; kwargs...))
end

#
Expand Down
61 changes: 24 additions & 37 deletions src/treetensornetworks/ttn.jl
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,16 @@ end

const TTN = TreeTensorNetwork

"""
    ttn(itensor_network::ITensorNetwork, ortho_center::Vector)

Construct a `TreeTensorNetwork` wrapping `itensor_network` with the
orthogonality center region `ortho_center`.
"""
function ttn(itensor_network::ITensorNetwork, ortho_center::Vector)
  # Forward to the parametrized `TTN` constructor. Calling `ttn` again with
  # the same arguments would dispatch right back to this (most specific)
  # method and recurse infinitely.
  return TTN{vertextype(itensor_network)}(itensor_network, ortho_center)
end

# The data-graph type of a `TTN` type is that of its wrapped
# `itensor_network` field.
data_graph_type(G::Type{<:TTN}) = data_graph_type(fieldtype(G, :itensor_network))

function Base.copy::TTN)
return TTN(copy.itensor_network), copy.ortho_center))
return ttn(copy.itensor_network), copy.ortho_center))
end

# Field access
Expand All @@ -44,36 +48,36 @@ data_graph(ψ::TTN) = data_graph(itensor_network(ψ))
# Constructor
#

TTN(tn::ITensorNetwork, args...) = TTN{vertextype(tn)}(tn, args...)
ttn(tn::ITensorNetwork, args...) = TTN{vertextype(tn)}(tn, args...)

# catch-all for default ElType
function TTN(g::AbstractGraph, args...; kwargs...)
return TTN(Float64, g, args...; kwargs...)
function ttn(g::AbstractGraph, args...; kwargs...)
return ttn(Float64, g, args...; kwargs...)
end

function TTN(eltype::Type{<:Number}, graph::AbstractGraph, args...; kwargs...)
function ttn(eltype::Type{<:Number}, graph::AbstractGraph, args...; kwargs...)
itensor_network = ITensorNetwork(eltype, graph; kwargs...)
return TTN(itensor_network, args...)
return ttn(itensor_network, args...)
end

# construct from given state (map)
function TTN(::Type{ElT}, is::AbstractIndsNetwork, initstate, args...) where {ElT<:Number}
function ttn(::Type{ElT}, is::AbstractIndsNetwork, initstate, args...) where {ElT<:Number}
itensor_network = ITensorNetwork(ElT, is, initstate)
return TTN(itensor_network, args...)
return ttn(itensor_network, args...)
end

# Constructor from a collection of ITensors.
# TODO: Support other collections like `Dictionary`,
# interface for custom vertex names.
function TTN(ts::ITensorCollection)
return TTN(ITensorNetwork(ts))
function ttn(ts::ITensorCollection)
return ttn(ITensorNetwork(ts))
end

# TODO: Implement `random_circuit_ttn` for non-trivial
# bond dimensions and correlations.
# TODO: Implement random_ttn for QN-Index
function random_ttn(args...; kwargs...)
T = TTN(args...; kwargs...)
T = ttn(args...; kwargs...)
randn!.(vertex_data(T))
normalize!.(vertex_data(T))
return T
Expand All @@ -91,14 +95,14 @@ function random_mps(
else
randomMPS(external_inds, states; linkdims=internal_inds_space)
end
return TTN([tn_mps[v] for v in eachindex(tn_mps)])
return ttn([tn_mps[v] for v in eachindex(tn_mps)])
end

#
# Construction from operator (map)
#

function TTN(
function ttn(
::Type{ElT},
sites_map::Pair{<:AbstractIndsNetwork,<:AbstractIndsNetwork},
ops::Dictionary;
Expand All @@ -110,7 +114,7 @@ function TTN(
for v in vertices(sites)
os *= Op(ops[v], v)
end
T = TTN(ElT, os, sites; kwargs...)
T = ttn(ElT, os, sites; kwargs...)
# see https://github.com/ITensor/ITensors.jl/issues/526
lognormT = lognorm(T)
T /= exp(lognormT / N) # TODO: fix broadcasting for in-place assignment
Expand All @@ -119,26 +123,26 @@ function TTN(
return T
end

function TTN(
function ttn(
::Type{ElT},
sites_map::Pair{<:AbstractIndsNetwork,<:AbstractIndsNetwork},
fops::Function;
kwargs...,
) where {ElT<:Number}
sites = first(sites_map) # TODO: Use the sites_map
ops = Dictionary(vertices(sites), map(v -> fops(v), vertices(sites)))
return TTN(ElT, sites, ops; kwargs...)
return ttn(ElT, sites, ops; kwargs...)
end

function TTN(
function ttn(
::Type{ElT},
sites_map::Pair{<:AbstractIndsNetwork,<:AbstractIndsNetwork},
op::String;
kwargs...,
) where {ElT<:Number}
sites = first(sites_map) # TODO: Use the sites_map
ops = Dictionary(vertices(sites), fill(op, nv(sites)))
return TTN(ElT, sites, ops; kwargs...)
return ttn(ElT, sites, ops; kwargs...)
end

# Special constructors
Expand All @@ -156,25 +160,8 @@ function mps(external_inds::Vector{<:Vector{<:Index}}; states)
tn = insert_missing_internal_inds(
tn, edges(g); internal_inds_space=trivial_space(indtype(external_inds))
)
return TTN(tn)
end

## function mps(external_inds::Vector{<:Index}; states)
## is = path_indsnetwork(external_inds)
## tn = TTN(underlying_graph(is))
## tn = insert_missing_internal_inds(tn, trivial_space(indtype(is)))
## for v in vertices(tn)
## @show v
## @show tn[v]
## tn[v] *= state(only(is[v]), states(v))
## @show tn[v]
## end
## return tn
## end

## function productTTN(args...; kwargs...)
## return TTN(args...; link_space=1, kwargs...)
## end
return ttn(tn)
end

#
# Utility
Expand Down
6 changes: 3 additions & 3 deletions test/test_additensornetworks.jl
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
@eval module $(gensym())
using Graphs: rem_edge!, vertices
using NamedGraphs: NamedEdge, hexagonal_lattice_graph, named_grid
using ITensorNetworks: ITensorNetwork, inner_network, randomITensorNetwork, siteinds
using ITensorNetworks: ITensorNetwork, inner_network, random_itensornetwork, siteinds
using ITensors: ITensors, apply, op
using Random: Random
using Test: @test, @testset
Expand Down Expand Up @@ -32,8 +32,8 @@ using Test: @test, @testset
rem_edge!(s2, NamedEdge((1, 1) => (1, 2)))

v = rand(vertices(g))
ψ1 = randomITensorNetwork(s1; link_space=χ)
ψ2 = randomITensorNetwork(s2; link_space=χ)
ψ1 = random_itensornetwork(s1; link_space=χ)
ψ2 = random_itensornetwork(s2; link_space=χ)

ψ12 = ψ1 + ψ2

Expand Down
4 changes: 2 additions & 2 deletions test/test_apply.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ using ITensorNetworks:
contract_inner,
environment,
norm_network,
randomITensorNetwork,
random_itensornetwork,
siteinds,
update
using ITensors: ITensors
Expand All @@ -25,7 +25,7 @@ using Test: @test, @testset
g = named_grid(g_dims)
s = siteinds("S=1/2", g)
χ = 2
ψ = randomITensorNetwork(s; link_space=χ)
ψ = random_itensornetwork(s; link_space=χ)
v1, v2 = (2, 2), (1, 2)
ψψ = norm_network(ψ)

Expand Down
Loading

0 comments on commit a4406da

Please sign in to comment.