Merge branch 'main' into normalize_support_bp_update
JoeyT1994 authored Aug 8, 2024
2 parents f221b70 + 806d897 commit 5bcb060
Showing 39 changed files with 437 additions and 325 deletions.
12 changes: 9 additions & 3 deletions Project.toml
@@ -1,7 +1,7 @@
name = "ITensorNetworks"
uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
authors = ["Matthew Fishman <[email protected]>, Joseph Tindall <[email protected]> and contributors"]
version = "0.11.7"
version = "0.11.15"

[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
@@ -19,6 +19,7 @@ IsApprox = "28f27b66-4bd8-47e7-9110-e2746eb8bed7"
IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
KrylovKit = "0b1a1467-8014-51b9-945f-bf0ae24f4b77"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
NamedGraphs = "678767b0-92e7-4007-89e4-4527a8725b19"
PackageExtensionCompat = "65ce6f38-6b18-4e1d-a461-8949797d7930"
@@ -35,19 +36,22 @@ TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
TupleTools = "9d95972d-f1c8-5527-a6e0-b4b365fa01f6"

[weakdeps]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
GraphsFlows = "06909019-6f44-4949-96fc-b9d9aaa02889"
OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715"
Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"

[extensions]
ITensorNetworksAdaptExt = "Adapt"
ITensorNetworksEinExprsExt = "EinExprs"
ITensorNetworksGraphsFlowsExt = "GraphsFlows"
ITensorNetworksOMEinsumContractionOrdersExt = "OMEinsumContractionOrders"
ITensorNetworksObserversExt = "Observers"

[compat]
AbstractTrees = "0.4.4"
Adapt = "4"
Combinatorics = "1"
Compat = "3, 4"
DataGraphs = "0.2.3"
@@ -58,11 +62,12 @@ DocStringExtensions = "0.9"
EinExprs = "0.6.4"
Graphs = "1.8"
GraphsFlows = "0.1.1"
ITensorMPS = "0.1"
ITensors = "0.4, 0.5, 0.6"
ITensorMPS = "0.2.2"
ITensors = "0.6.8"
IsApprox = "0.1"
IterTools = "1.4.0"
KrylovKit = "0.6, 0.7"
MacroTools = "0.5"
NDTensors = "0.3"
NamedGraphs = "0.6.0"
OMEinsumContractionOrders = "0.8.3"
@@ -80,6 +85,7 @@ TupleTools = "1.4"
julia = "1.10"

[extras]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
GraphsFlows = "06909019-6f44-4949-96fc-b9d9aaa02889"
OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715"
28 changes: 0 additions & 28 deletions TODO.md

This file was deleted.

14 changes: 14 additions & 0 deletions ext/ITensorNetworksAdaptExt/ITensorNetworksAdaptExt.jl
@@ -0,0 +1,14 @@
module ITensorNetworksAdaptExt
using Adapt: Adapt, adapt
using ITensorNetworks: AbstractITensorNetwork, map_vertex_data_preserve_graph
function Adapt.adapt_structure(to, tn::AbstractITensorNetwork)
# TODO: Define and use:
#
# @preserve_graph map_vertex_data(adapt(to), tn)
#
# or just:
#
# @preserve_graph map(adapt(to), tn)
return map_vertex_data_preserve_graph(adapt(to), tn)
end
end
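
A minimal sketch of how the new extension might be exercised (the graph and network constructors are assumed from NamedGraphs and ITensorNetworks; element-type conversion is one common use of `adapt`):

```julia
using Adapt: adapt
using ITensorNetworks: random_tensornetwork, siteinds
using NamedGraphs.NamedGraphGenerators: named_grid

g = named_grid((2, 2))
tn = random_tensornetwork(siteinds("S=1/2", g); link_space=2)

# `adapt` is applied to every vertex tensor (here converting storage to
# Float32) while the graph structure of the network is left untouched:
tn32 = adapt(Float32, tn)
```
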
@@ -47,7 +47,7 @@ Returns a [`NestedEinsum`](@ref) instance.
```jldoctest
julia> using ITensors, ITensorContractionOrders
julia> i, j, k, l = Index(4), Index(5), Index(6), Index(7);
julia> x, y, z = randomITensor(i, j), randomITensor(j, k), randomITensor(k, l);
julia> x, y, z = random_itensor(i, j), random_itensor(j, k), random_itensor(k, l);
julia> net = optimize_contraction([x, y, z]; optimizer=TreeSA());
```
"""
74 changes: 55 additions & 19 deletions src/abstractitensornetwork.jl
@@ -39,6 +39,7 @@ using ITensors:
using ITensorMPS: ITensorMPS, add, linkdim, linkinds, siteinds
using .ITensorsExtensions: ITensorsExtensions, indtype, promote_indtype
using LinearAlgebra: LinearAlgebra, factorize
using MacroTools: @capture
using NamedGraphs: NamedGraphs, NamedGraph, not_implemented
using NamedGraphs.GraphsExtensions:
⊔, directed_graph, incident_edges, rename_vertices, vertextype
@@ -138,6 +139,30 @@ function setindex_preserve_graph!(tn::AbstractITensorNetwork, value, vertex)
return tn
end

# TODO: Move to `BaseExtensions` module.
function is_setindex!_expr(expr::Expr)
return is_assignment_expr(expr) && is_getindex_expr(first(expr.args))
end
is_setindex!_expr(x) = false
is_getindex_expr(expr::Expr) = (expr.head === :ref)
is_getindex_expr(x) = false
is_assignment_expr(expr::Expr) = (expr.head === :(=))
is_assignment_expr(expr) = false

# TODO: Define this in terms of a function mapping
# preserve_graph_function(::typeof(setindex!)) = setindex!_preserve_graph
# preserve_graph_function(::typeof(map_vertex_data)) = map_vertex_data_preserve_graph
# Also allow annotating codeblocks like `@views`.
macro preserve_graph(expr)
if !is_setindex!_expr(expr)
error(
"preserve_graph must be used with setindex! syntax (as @preserve_graph a[i,j,...] = value)",
)
end
@capture(expr, array_[indices__] = value_)
return :(setindex_preserve_graph!($(esc(array)), $(esc(value)), $(esc.(indices)...)))
end
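
For orientation, the macro turns an indexing assignment into a call to `setindex_preserve_graph!`. A sketch of the two write paths (illustrative only; `@preserve_graph` is defined in this file and may not be exported):

```julia
using ITensors: ITensor, inds
using ITensorNetworks: ITensorNetworks, random_tensornetwork, siteinds
using NamedGraphs.NamedGraphGenerators: named_grid

tn = random_tensornetwork(siteinds("S=1/2", named_grid((2, 2))))
v = (1, 1)

# Plain `setindex!` (defined below) rebuilds the edges incident to `v`
# from the indices of the assigned tensor:
tn[v] = ITensor(inds(tn[v]))

# `@preserve_graph` rewrites the assignment to `setindex_preserve_graph!`,
# storing the tensor without recomputing any edges:
ITensorNetworks.@preserve_graph tn[v] = ITensor(inds(tn[v]))
```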

function ITensors.hascommoninds(tn::AbstractITensorNetwork, edge::Pair)
return hascommoninds(tn, edgetype(tn)(edge))
end
@@ -148,7 +173,7 @@ end

function Base.setindex!(tn::AbstractITensorNetwork, value, v)
# v = to_vertex(tn, index...)
setindex_preserve_graph!(tn, value, v)
@preserve_graph tn[v] = value
for edge in incident_edges(tn, v)
rem_edge!(tn, edge)
end
@@ -297,12 +322,12 @@ function ITensors.replaceinds(
@assert underlying_graph(is) == underlying_graph(is′)
for v in vertices(is)
isassigned(is, v) || continue
setindex_preserve_graph!(tn, replaceinds(tn[v], is[v] => is′[v]), v)
@preserve_graph tn[v] = replaceinds(tn[v], is[v] => is′[v])
end
for e in edges(is)
isassigned(is, e) || continue
for v in (src(e), dst(e))
setindex_preserve_graph!(tn, replaceinds(tn[v], is[e] => is′[e]), v)
@preserve_graph tn[v] = replaceinds(tn[v], is[e] => is′[e])
end
end
return tn
@@ -361,13 +386,31 @@ end

LinearAlgebra.adjoint(tn::Union{IndsNetwork,AbstractITensorNetwork}) = prime(tn)

#dag(tn::AbstractITensorNetwork) = map_vertex_data(dag, tn)
function ITensors.dag(tn::AbstractITensorNetwork)
tndag = copy(tn)
for v in vertices(tndag)
setindex_preserve_graph!(tndag, dag(tndag[v]), v)
function map_vertex_data(f, tn::AbstractITensorNetwork)
tn = copy(tn)
for v in vertices(tn)
tn[v] = f(tn[v])
end
return tndag
return tn
end

# TODO: Define `@preserve_graph map_vertex_data(f, tn)`
function map_vertex_data_preserve_graph(f, tn::AbstractITensorNetwork)
tn = copy(tn)
for v in vertices(tn)
@preserve_graph tn[v] = f(tn[v])
end
return tn
end

function Base.conj(tn::AbstractITensorNetwork)
# TODO: Use `@preserve_graph map_vertex_data(f, tn)`
return map_vertex_data_preserve_graph(conj, tn)
end

function ITensors.dag(tn::AbstractITensorNetwork)
# TODO: Use `@preserve_graph map_vertex_data(f, tn)`
return map_vertex_data_preserve_graph(dag, tn)
end
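
Both `conj` and `dag` leave each tensor's indices intact, which is what makes the graph-preserving traversal safe here; schematically (reusing a network `tn` like the ones in the sketches above):

```julia
tn_conj = conj(tn)  # same graph, elementwise-conjugated vertex tensors
tn_dag = dag(tn)    # same graph, dagged (conjugated, arrows reversed) tensors
```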

# TODO: should this make sure that internal indices
@@ -442,9 +485,7 @@ function NDTensors.contract(
for n_dst in neighbors_dst
add_edge!(tn, merged_vertex => n_dst)
end

setindex_preserve_graph!(tn, new_itensor, merged_vertex)

@preserve_graph tn[merged_vertex] = new_itensor
return tn
end

@@ -533,13 +574,8 @@ function LinearAlgebra.factorize(
add_edge!(tn, X_vertex => nX)
end
add_edge!(tn, Y_vertex => dst(edge))

# tn[X_vertex] = X
setindex_preserve_graph!(tn, X, X_vertex)

# tn[Y_vertex] = Y
setindex_preserve_graph!(tn, Y, Y_vertex)

@preserve_graph tn[X_vertex] = X
@preserve_graph tn[Y_vertex] = Y
return tn
end

26 changes: 14 additions & 12 deletions src/caches/beliefpropagationcache.jl
@@ -1,6 +1,6 @@
using Graphs: IsDirected
using SplitApplyCombine: group
using LinearAlgebra: diag
using LinearAlgebra: diag, dot
using ITensors: dir
using ITensorMPS: ITensorMPS
using NamedGraphs.PartitionedGraphs:
@@ -12,15 +12,16 @@ using NamedGraphs.PartitionedGraphs:
partitionedges,
unpartitioned_graph
using SimpleTraits: SimpleTraits, Not, @traitfn
using NDTensors: NDTensors

default_message(inds_e) = ITensor[denseblocks(delta(i)) for i in inds_e]
default_message(elt, inds_e) = ITensor[denseblocks(delta(elt, i)) for i in inds_e]
default_messages(ptn::PartitionedGraph) = Dictionary()
default_message_norm(m::ITensor) = norm(m)
function default_message_update(contract_list::Vector{ITensor}; normalize=true, kwargs...)
function default_message_update(contract_list::Vector{ITensor}; normalize=true, kwargs...)
sequence = optimal_contraction_sequence(contract_list)
updated_messages = contract(contract_list; sequence, kwargs...)
if normalize
updated_messages /= norm(updated_messages)
message_norm = norm(updated_messages)
if normalize && !iszero(message_norm)
updated_messages /= message_norm
end
return ITensor[updated_messages]
end
@@ -32,17 +33,16 @@ default_partitioned_vertices(ψ::AbstractITensorNetwork) = group(v -> v, vertices(ψ))
function default_partitioned_vertices(f::AbstractFormNetwork)
return group(v -> original_state_vertex(f, v), vertices(f))
end
default_cache_update_kwargs(cache) = (; maxiter=20, tol=1e-5)
default_cache_update_kwargs(cache) = (; maxiter=25, tol=1e-8)
function default_cache_construction_kwargs(alg::Algorithm"bp", ψ::AbstractITensorNetwork)
return (; partitioned_vertices=default_partitioned_vertices(ψ))
end

function message_diff(
message_a::Vector{ITensor}, message_b::Vector{ITensor}; message_norm=default_message_norm
)
#TODO: Take `dot` without precontracting the messages to allow scaling to more complex messages
function message_diff(message_a::Vector{ITensor}, message_b::Vector{ITensor})
lhs, rhs = contract(message_a), contract(message_b)
norm_lhs, norm_rhs = message_norm(lhs), message_norm(rhs)
return 0.5 * norm((denseblocks(lhs) / norm_lhs) - (denseblocks(rhs) / norm_rhs))
f = abs2(dot(lhs / norm(lhs), rhs / norm(rhs)))
return 1 - f
end
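
The distance is now `1 - F`, with `F` the squared overlap of the two normalized contracted messages, so identical messages give `0` and orthogonal ones give `1`. A quick sketch of the computation on hypothetical one-tensor messages:

```julia
using ITensors: Index, contract, random_itensor
using LinearAlgebra: dot, norm

i = Index(2)
message_a = [random_itensor(i)]
message_b = [random_itensor(i)]

lhs, rhs = contract(message_a), contract(message_b)
f = abs2(dot(lhs / norm(lhs), rhs / norm(rhs)))
diff = 1 - f  # ≈ 0 when the messages agree up to scale and phase
```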

struct BeliefPropagationCache{PTN,MTS,DM}
@@ -98,8 +98,10 @@ for f in [
end
end

NDTensors.scalartype(bp_cache) = scalartype(tensornetwork(bp_cache))

function default_message(bp_cache::BeliefPropagationCache, edge::PartitionEdge)
return default_message(bp_cache)(linkinds(bp_cache, edge))
return default_message(bp_cache)(scalartype(bp_cache), linkinds(bp_cache, edge))
end
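
Default messages are now seeded with the cache's scalar type instead of a fixed element type, so e.g. a `Float32` network yields `Float32` identity messages. A sketch using the file-local helper defined above:

```julia
using ITensors: Index

m = default_message(Float32, [Index(2)])
eltype(only(m))  # Float32: the delta message matches the requested scalar type
```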

function message(bp_cache::BeliefPropagationCache, edge::PartitionEdge)
1 change: 1 addition & 0 deletions src/contract.jl
@@ -73,6 +73,7 @@ function logscalar(
denominator_terms
end

any(iszero, denominator_terms) && return -Inf
return sum(log.(numerator_terms)) - sum(log.((denominator_terms)))
end

4 changes: 2 additions & 2 deletions src/lib/ModelNetworks/src/ModelNetworks.jl
@@ -1,7 +1,7 @@
module ModelNetworks
using Graphs: degree, dst, edges, src
using ..ITensorNetworks: IndsNetwork, delta_network, insert_linkinds, itensor
using ITensors: commoninds, diagITensor, inds, noprime
using ITensors: commoninds, diag_itensor, inds, noprime
using LinearAlgebra: Diagonal, eigen
using NamedGraphs: NamedGraph

@@ -21,7 +21,7 @@ function ising_network(
tn = delta_network(eltype, s)
if (szverts != nothing)
for v in szverts
tn[v] = diagITensor(eltype[1, -1], inds(tn[v]))
tn[v] = diag_itensor(eltype[1, -1], inds(tn[v]))
end
end
for edge in edges(tn)
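`diag_itensor` is the snake_case successor of `diagITensor`: it places the given vector along the diagonal of a tensor over the supplied indices. A small illustration:

```julia
using ITensors: Index, diag_itensor

i = Index(2)
sz = diag_itensor(Float64[1, -1], i, i')  # Pauli-Z-like diagonal tensor
```
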
4 changes: 2 additions & 2 deletions src/mpo_mps_compatibility.jl
@@ -9,9 +9,9 @@ function ITensorMPS.MPO(opsum_sum::Sum{<:OpSum}, s::IndsNetwork)
return ITensorMPS.MPO(sum(Ops.terms(opsum_sum)), s)
end

function ITensorMPS.randomMPS(s::IndsNetwork, args...; kwargs...)
function ITensorMPS.random_mps(s::IndsNetwork, args...; kwargs...)
s_linear = [only(s[v]) for v in 1:nv(s)]
return ITensorMPS.randomMPS(s_linear, args...; kwargs...)
return ITensorMPS.random_mps(s_linear, args...; kwargs...)
end
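
The wrapper now targets the snake_case `random_mps` API, flattening an `IndsNetwork` whose vertices are `1:nv(s)` into a linear site list. A hypothetical call:

```julia
using ITensorMPS: random_mps
using ITensorNetworks: siteinds
using NamedGraphs.NamedGraphGenerators: named_path_graph

s = siteinds("S=1/2", named_path_graph(4))  # vertices 1, 2, 3, 4
ψ = random_mps(s; linkdims=2)               # dispatches to the method above
```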

function ITensorMPS.MPS(s::IndsNetwork, args...; kwargs...)
25 changes: 16 additions & 9 deletions src/sitetype.jl
@@ -11,18 +11,25 @@ function ITensors.siteind(d::Integer, v; addtags="", kwargs...)
return ITensors.addtags(Index(d; tags="Site, $addtags", kwargs...), vertex_tag(v))
end

function ITensors.siteinds(sitetypes::AbstractDictionary, g::AbstractGraph; kwargs...)
is = IndsNetwork(g)
for v in vertices(g)
is[v] = [siteind(sitetypes[v], vertex_tag(v); kwargs...)]
end
return is
to_siteinds_callable(x) = Returns(x)
function to_siteinds_callable(x::AbstractDictionary)
return Base.Fix1(getindex, x) ∘ keytype(x)
end

function ITensors.siteinds(x, g::AbstractGraph; kwargs...)
return siteinds(to_siteinds_callable(x), g; kwargs...)
end

function ITensors.siteinds(sitetype, g::AbstractGraph; kwargs...)
return siteinds(Dictionary(vertices(g), fill(sitetype, nv(g))), g; kwargs...)
function to_siteind(x, vertex; kwargs...)
return [siteind(x, vertex_tag(vertex); kwargs...)]
end

to_siteind(x::Index, vertex; kwargs...) = [x]

function ITensors.siteinds(f::Function, g::AbstractGraph; kwargs...)
return siteinds(Dictionary(vertices(g), map(v -> f(v), vertices(g))), g; kwargs...)
is = IndsNetwork(g)
for v in vertices(g)
is[v] = to_siteind(f(v), v; kwargs...)
end
return is
end
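
The refactor routes every input through a callable: a plain value is wrapped in `Returns`, a dictionary becomes a vertex lookup, a function is evaluated per vertex, and a bare `Index` is passed through by `to_siteind`. A sketch of the resulting call styles:

```julia
using ITensorNetworks: siteinds
using NamedGraphs.NamedGraphGenerators: named_grid

g = named_grid((2, 2))

# A plain value gives a uniform site type on every vertex:
s_uniform = siteinds("S=1/2", g)

# A function is evaluated per vertex, e.g. alternating site types:
s_mixed = siteinds(v -> iseven(sum(v)) ? "S=1" : "S=1/2", g)
```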