Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into normalize!
Browse files Browse the repository at this point in the history
  • Loading branch information
JoeyT1994 committed Dec 10, 2024
2 parents 2cb7f85 + 70a3f7e commit 73e9e1e
Show file tree
Hide file tree
Showing 22 changed files with 281 additions and 252 deletions.
14 changes: 7 additions & 7 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "ITensorNetworks"
uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
authors = ["Matthew Fishman <[email protected]>, Joseph Tindall <[email protected]> and contributors"]
version = "0.11.15"
version = "0.11.24"

[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
Expand Down Expand Up @@ -62,20 +62,20 @@ DocStringExtensions = "0.9"
EinExprs = "0.6.4"
Graphs = "1.8"
GraphsFlows = "0.1.1"
ITensorMPS = "0.2.2"
ITensors = "0.6.8"
IsApprox = "0.1"
ITensorMPS = "0.3"
ITensors = "0.7"
IsApprox = "0.1, 1, 2"
IterTools = "1.4.0"
KrylovKit = "0.6, 0.7"
KrylovKit = "0.6, 0.7, 0.8"
MacroTools = "0.5"
NDTensors = "0.3"
NamedGraphs = "0.6.0"
OMEinsumContractionOrders = "0.8.3"
OMEinsumContractionOrders = "0.8.3, 0.9"
Observers = "0.2.4"
PackageExtensionCompat = "1"
SerializedElementArrays = "0.1"
SimpleTraits = "0.9"
SparseArrayKit = "0.3"
SparseArrayKit = "0.3, 0.4"
SplitApplyCombine = "1.2"
StaticArrays = "1.5.12"
StructWalk = "0.2"
Expand Down
144 changes: 72 additions & 72 deletions README.md

Large diffs are not rendered by default.

21 changes: 21 additions & 0 deletions examples/test.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
using ITensorNetworks: IndsNetwork, siteinds, ttn
using ITensorNetworks.ModelHamiltonians: ising
using ITensors: Index, OpSum, terms, sites
using NamedGraphs.NamedGraphGenerators: named_grid
using NamedGraphs.GraphsExtensions: rem_vertex

"""
    filter_terms(H, verts)

Return a new `OpSum` containing only those terms of `H` whose support is
disjoint from the vertices in `verts`.
"""
function filter_terms(H, verts)
  H_new = OpSum()
  for term in terms(H)
    # Keep the term only if none of its sites lies in `verts`.
    # (The `∈` operator was lost in transcription; `v -> v verts` is not valid Julia.)
    if isempty(filter(v -> v ∈ verts, sites(term)))
      H_new += term
    end
  end
  return H_new
end

# Build an 8×1 grid graph with "S=1/2" site indices and an Ising Hamiltonian on it.
g = named_grid((8,1))
s = siteinds("S=1/2", g)
H = ising(s)
# Drop every Hamiltonian term acting on vertex (4,1), then build a TTN operator
# from the filtered OpSum (presumably to test construction on punctured support).
H_mod = filter_terms(H, [(4,1)])
ttno = ttn(H_mod, s)
56 changes: 40 additions & 16 deletions src/abstractitensornetwork.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ using Graphs:
add_edge!,
add_vertex!,
bfs_tree,
center,
dst,
edges,
edgetype,
Expand All @@ -18,6 +19,7 @@ using Graphs:
using ITensors:
ITensors,
ITensor,
@Algorithm_str,
addtags,
combiner,
commoninds,
Expand All @@ -40,10 +42,10 @@ using ITensorMPS: ITensorMPS, add, linkdim, linkinds, siteinds
using .ITensorsExtensions: ITensorsExtensions, indtype, promote_indtype
using LinearAlgebra: LinearAlgebra, factorize
using MacroTools: @capture
using NamedGraphs: NamedGraphs, NamedGraph, not_implemented
using NamedGraphs: NamedGraphs, NamedGraph, not_implemented, steiner_tree
using NamedGraphs.GraphsExtensions:
  ⊔, directed_graph, incident_edges, rename_vertices, vertextype
using NDTensors: NDTensors, dim
using NDTensors: NDTensors, dim, Algorithm
using SplitApplyCombine: flatten

abstract type AbstractITensorNetwork{V} <: AbstractDataGraph{V,ITensor,ITensor} end
Expand Down Expand Up @@ -584,7 +586,9 @@ function LinearAlgebra.factorize(tn::AbstractITensorNetwork, edge::Pair; kwargs.
end

# For ambiguity error; TODO: decide whether to use graph mutating methods when resulting graph is unchanged?
function _orthogonalize_edge(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...)
function gauge_edge(
alg::Algorithm"orthogonalize", tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...
)
# tn = factorize(tn, edge; kwargs...)
# # TODO: Implement as `only(common_neighbors(tn, src(edge), dst(edge)))`
# new_vertex = only(neighbors(tn, src(edge)) ∩ neighbors(tn, dst(edge)))
Expand All @@ -598,23 +602,43 @@ function _orthogonalize_edge(tn::AbstractITensorNetwork, edge::AbstractEdge; kwa
return tn
end

function ITensorMPS.orthogonalize(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...)
return _orthogonalize_edge(tn, edge; kwargs...)
# For ambiguity error; TODO: decide whether to use graph mutating methods when resulting graph is unchanged?
"""
    gauge_walk(alg::Algorithm, tn::AbstractITensorNetwork, edges::Vector{<:AbstractEdge}; kwargs...)

Gauge `tn` along each edge in `edges`, in order, using gauging algorithm `alg`.
Returns a new network; the input network is not mutated.
"""
function gauge_walk(
  alg::Algorithm, tn::AbstractITensorNetwork, edges::Vector{<:AbstractEdge}; kwargs...
)
  # Fold the per-edge gauging over the edge list, starting from a defensive copy
  # (an empty edge list therefore still returns a copy, as before).
  return foldl((net, e) -> gauge_edge(alg, net, e; kwargs...), edges; init=copy(tn))
end

"""
    gauge_walk(alg::Algorithm, tn::AbstractITensorNetwork, edge::Pair; kwargs...)

Gauge `tn` across a single edge given as a `Pair`, converting it to the
network's edge type and dispatching to `gauge_edge`.
"""
function gauge_walk(alg::Algorithm, tn::AbstractITensorNetwork, edge::Pair; kwargs...)
  # `alg` is already constrained to `Algorithm` by the signature; the redundant
  # `alg::Algorithm` typeassert in the original call site is dropped.
  return gauge_edge(alg, tn, edgetype(tn)(edge); kwargs...)
end

function ITensorMPS.orthogonalize(tn::AbstractITensorNetwork, edge::Pair; kwargs...)
return orthogonalize(tn, edgetype(tn)(edge); kwargs...)
"""
    gauge_walk(alg::Algorithm, tn::AbstractITensorNetwork, edges::Vector{<:Pair}; kwargs...)

Gauge `tn` along a sequence of edges given as `Pair`s by converting each to the
network's edge type and delegating to the `Vector{<:AbstractEdge}` method.
"""
function gauge_walk(
  alg::Algorithm, tn::AbstractITensorNetwork, edges::Vector{<:Pair}; kwargs...
)
  typed_edges = map(edgetype(tn), edges)
  return gauge_walk(alg, tn, typed_edges; kwargs...)
end

# Orthogonalize an ITensorNetwork towards a source vertex, treating
# Gauge a ITensorNetwork towards a region, treating
# the network as a tree spanned by a spanning tree.
# TODO: Rename `tree_orthogonalize`.
function ITensorMPS.orthogonalize(ψ::AbstractITensorNetwork, source_vertex)
spanning_tree_edges = post_order_dfs_edges(bfs_tree(ψ, source_vertex), source_vertex)
for e in spanning_tree_edges
ψ = orthogonalize(ψ, e)
end
return ψ
"""
    tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region::Vector)

Gauge `ψ` towards `region`, treating the network as a tree spanned by a
spanning tree. For a multi-vertex region the walk targets the center of the
Steiner tree connecting the region's vertices.
"""
function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region::Vector)
  region_center =
    length(region) != 1 ? first(center(steiner_tree(ψ, region))) : only(region)
  path = post_order_dfs_edges(bfs_tree(ψ, region_center), region_center)
  # Skip edges lying entirely inside the region; the `∈` operators were lost in
  # transcription, leaving invalid syntax (`src(e) region`).
  path = filter(e -> !((src(e) ∈ region) && (dst(e) ∈ region)), path)
  return gauge_walk(alg, ψ, path)
end

# Gauge towards a single-vertex region: wrap it in a vector and delegate to the
# `Vector`-region method.
function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region)
  region_vec = [region]
  return tree_gauge(alg, ψ, region_vec)
end

"""
    tree_orthogonalize(ψ::AbstractITensorNetwork, region; kwargs...)

Orthogonalize `ψ` towards `region` by tree-gauging with the `"orthogonalize"`
algorithm.
"""
# The `(ψ` in the signature was lost in transcription
# (`function tree_orthogonalize::AbstractITensorNetwork, ...` is invalid); the
# body clearly forwards `ψ`.
function tree_orthogonalize(ψ::AbstractITensorNetwork, region; kwargs...)
  return tree_gauge(Algorithm("orthogonalize"), ψ, region; kwargs...)
end

# TODO: decide whether to use graph mutating methods when resulting graph is unchanged?
Expand Down Expand Up @@ -759,7 +783,7 @@ end
# Link dimensions
#

function ITensors.maxlinkdim(tn::AbstractITensorNetwork)
function ITensorMPS.maxlinkdim(tn::AbstractITensorNetwork)
md = 1
for e in edges(tn)
md = max(md, linkdim(tn, e))
Expand Down
4 changes: 2 additions & 2 deletions src/apply.jl
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ function ITensors.apply(
v⃗ = neighbor_vertices(ψ, o)
if length(v⃗) == 1
if ortho
ψ = orthogonalize(ψ, v⃗[1])
ψ = tree_orthogonalize(ψ, v⃗[1])
end
oψᵥ = apply(o, ψ[v⃗[1]])
if normalize
Expand All @@ -215,7 +215,7 @@ function ITensors.apply(
error("Vertices where the gates are being applied must be neighbors for now.")
end
if ortho
ψ = orthogonalize(ψ, v⃗[1])
ψ = tree_orthogonalize(ψ, v⃗[1])
end
if variational_optimization_only || !is_product_env
ψᵥ₁, ψᵥ₂ = full_update_bp(
Expand Down
19 changes: 5 additions & 14 deletions src/caches/beliefpropagationcache.jl
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ using NDTensors: NDTensors

default_message(elt, inds_e) = ITensor[denseblocks(delta(elt, i)) for i in inds_e]
default_messages(ptn::PartitionedGraph) = Dictionary()
default_message_norm(m::ITensor) = norm(m)
function default_message_update(contract_list::Vector{ITensor}; normalize=true, kwargs...)
sequence = optimal_contraction_sequence(contract_list)
updated_messages = contract(contract_list; sequence, kwargs...)
Expand Down Expand Up @@ -107,7 +106,7 @@ end

function message(bp_cache::BeliefPropagationCache, edge::PartitionEdge)
mts = messages(bp_cache)
return get(mts, edge, default_message(bp_cache, edge))
return get(() -> default_message(bp_cache, edge), mts, edge)
end
function messages(bp_cache::BeliefPropagationCache, edges; kwargs...)
return map(edge -> message(bp_cache, edge; kwargs...), edges)
Expand Down Expand Up @@ -153,24 +152,16 @@ end
function environment(bp_cache::BeliefPropagationCache, verts::Vector)
partition_verts = partitionvertices(bp_cache, verts)
messages = environment(bp_cache, partition_verts)
central_tensors = ITensor[
tensornetwork(bp_cache)[v] for v in setdiff(vertices(bp_cache, partition_verts), verts)
]
central_tensors = factors(bp_cache, setdiff(vertices(bp_cache, partition_verts), verts))
return vcat(messages, central_tensors)
end

function factors(bp_cache::BeliefPropagationCache, vertices)
tn = tensornetwork(bp_cache)
return map(vertex -> tn[vertex], vertices)
end

function factor(bp_cache::BeliefPropagationCache, vertex)
return only(factors(bp_cache, [vertex]))
function factors(bp_cache::BeliefPropagationCache, verts::Vector)
return ITensor[tensornetwork(bp_cache)[v] for v in verts]
end

function factor(bp_cache::BeliefPropagationCache, vertex::PartitionVertex)
ptn = partitioned_tensornetwork(bp_cache)
return collect(eachtensor(subgraph(ptn, vertex)))
return factors(bp_cache, vertices(bp_cache, vertex))
end

"""
Expand Down
15 changes: 8 additions & 7 deletions src/inner.jl
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
using ITensors: inner, scalar, loginner
using ITensors: inner, scalar
using ITensorMPS: ITensorMPS, loginner
using LinearAlgebra: norm, norm_sqr

default_contract_alg(tns::Tuple) = "bp"
Expand Down Expand Up @@ -53,7 +54,7 @@ function ITensors.inner(
return scalar(tn; sequence)
end

function ITensors.loginner(
function ITensorMPS.loginner(
ϕ::AbstractITensorNetwork,
ψ::AbstractITensorNetwork;
alg=default_contract_alg((ϕ, ψ)),
Expand All @@ -62,7 +63,7 @@ function ITensors.loginner(
return loginner(Algorithm(alg), ϕ, ψ; kwargs...)
end

function ITensors.loginner(
function ITensorMPS.loginner(
ϕ::AbstractITensorNetwork,
A::AbstractITensorNetwork,
ψ::AbstractITensorNetwork;
Expand All @@ -72,13 +73,13 @@ function ITensors.loginner(
return loginner(Algorithm(alg), ϕ, A, ψ; kwargs...)
end

function ITensors.loginner(
function ITensorMPS.loginner(
alg::Algorithm"exact", ϕ::AbstractITensorNetwork, ψ::AbstractITensorNetwork; kwargs...
)
return log(inner(alg, ϕ, ψ); kwargs...)
end

function ITensors.loginner(
function ITensorMPS.loginner(
alg::Algorithm"exact",
ϕ::AbstractITensorNetwork,
A::AbstractITensorNetwork,
Expand All @@ -88,7 +89,7 @@ function ITensors.loginner(
return log(inner(alg, ϕ, A, ψ); kwargs...)
end

function ITensors.loginner(
function ITensorMPS.loginner(
alg::Algorithm"bp",
ϕ::AbstractITensorNetwork,
ψ::AbstractITensorNetwork;
Expand All @@ -99,7 +100,7 @@ function ITensors.loginner(
return logscalar(alg, tn; kwargs...)
end

function ITensors.loginner(
function ITensorMPS.loginner(
alg::Algorithm"bp",
ϕ::AbstractITensorNetwork,
A::AbstractITensorNetwork,
Expand Down
6 changes: 3 additions & 3 deletions src/solvers/alternating_update/alternating_update.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ function alternating_update(
nsites, # define default for each level of solver implementation
updater, # this specifies the update performed locally
outputlevel=default_outputlevel(),
region_printer=nothing,
sweep_printer=nothing,
region_printer=default_region_printer,
sweep_printer=default_sweep_printer,
(sweep_observer!)=nothing,
(region_observer!)=nothing,
root_vertex=GraphsExtensions.default_root_vertex(init_state),
Expand Down Expand Up @@ -59,7 +59,7 @@ function alternating_update(
(sweep_observer!)=nothing,
sweep_printer=default_sweep_printer,#?
(region_observer!)=nothing,
region_printer=nothing,
region_printer=default_region_printer,
)
state = copy(init_state)
@assert !isnothing(sweep_plans)
Expand Down
49 changes: 3 additions & 46 deletions src/solvers/alternating_update/region_update.jl
Original file line number Diff line number Diff line change
@@ -1,44 +1,3 @@
#ToDo: generalize beyond 2-site
#ToDo: remove concept of orthogonality center for generality
function current_ortho(sweep_plan, which_region_update)
regions = first.(sweep_plan)
region = regions[which_region_update]
current_verts = support(region)
if !isa(region, AbstractEdge) && length(region) == 1
return only(current_verts)
end
if which_region_update == length(regions)
# look back by one should be sufficient, but may be brittle?
overlapping_vertex = only(
intersect(current_verts, support(regions[which_region_update - 1]))
)
return overlapping_vertex
else
# look forward
other_regions = filter(
x -> !(issetequal(x, current_verts)), support.(regions[(which_region_update + 1):end])
)
# find the first region that has overlapping support with current region
ind = findfirst(x -> !isempty(intersect(support(x), support(region))), other_regions)
if isnothing(ind)
# look backward
other_regions = reverse(
filter(
x -> !(issetequal(x, current_verts)),
support.(regions[1:(which_region_update - 1)]),
),
)
ind = findfirst(x -> !isempty(intersect(support(x), support(region))), other_regions)
end
@assert !isnothing(ind)
future_verts = union(support(other_regions[ind]))
# return ortho_ceter as the vertex in current region that does not overlap with following one
overlapping_vertex = intersect(current_verts, future_verts)
nonoverlapping_vertex = only(setdiff(current_verts, overlapping_vertex))
return nonoverlapping_vertex
end
end

function region_update(
projected_operator,
state;
Expand All @@ -64,14 +23,13 @@ function region_update(

# ToDo: remove orthogonality center on vertex for generality
# region carries same information
ortho_vertex = current_ortho(sweep_plan, which_region_update)
if !isnothing(transform_operator)
projected_operator = transform_operator(
state, projected_operator; outputlevel, transform_operator_kwargs...
)
end
state, projected_operator, phi = extracter(
state, projected_operator, region, ortho_vertex; extracter_kwargs..., internal_kwargs
state, projected_operator, region; extracter_kwargs..., internal_kwargs
)
# create references, in case solver does (out-of-place) modify PH or state
state! = Ref(state)
Expand All @@ -97,9 +55,8 @@ function region_update(
# drho = noise * noiseterm(PH, phi, ortho) # TODO: actually implement this for trees...
# so noiseterm is a solver
#end
state, spec = inserter(
state, phi, region, ortho_vertex; inserter_kwargs..., internal_kwargs
)
#if isa(region, AbstractEdge) &&
state, spec = inserter(state, phi, region; inserter_kwargs..., internal_kwargs)
all_kwargs = (;
which_region_update,
sweep_plan,
Expand Down
3 changes: 2 additions & 1 deletion src/solvers/contract.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using Graphs: nv, vertices
using ITensors: ITensors, linkinds, sim
using ITensors: ITensors, sim
using ITensorMPS: linkinds
using ITensors.NDTensors: Algorithm, @Algorithm_str, contract
using NamedGraphs: vertextype

Expand Down
Loading

0 comments on commit 73e9e1e

Please sign in to comment.