diff --git a/README.md b/README.md index 8a92276d..2f86bee0 100644 --- a/README.md +++ b/README.md @@ -105,13 +105,13 @@ and 4 edge(s): with vertex data: 4-element Dictionaries.Dictionary{Tuple{Int64, Int64}, Any} - (1, 1) │ ((dim=2|id=712|"1×1,2×1"), (dim=2|id=598|"1×1,1×2")) - (2, 1) │ ((dim=2|id=712|"1×1,2×1"), (dim=2|id=457|"2×1,2×2")) - (1, 2) │ ((dim=2|id=598|"1×1,1×2"), (dim=2|id=683|"1×2,2×2")) - (2, 2) │ ((dim=2|id=457|"2×1,2×2"), (dim=2|id=683|"1×2,2×2")) + (1, 1) │ ((dim=2|id=74|"1×1,2×1"), (dim=2|id=723|"1×1,1×2")) + (2, 1) │ ((dim=2|id=74|"1×1,2×1"), (dim=2|id=823|"2×1,2×2")) + (1, 2) │ ((dim=2|id=723|"1×1,1×2"), (dim=2|id=712|"1×2,2×2")) + (2, 2) │ ((dim=2|id=823|"2×1,2×2"), (dim=2|id=712|"1×2,2×2")) julia> tn[1, 1] -ITensor ord=2 (dim=2|id=712|"1×1,2×1") (dim=2|id=598|"1×1,1×2") +ITensor ord=2 (dim=2|id=74|"1×1,2×1") (dim=2|id=723|"1×1,1×2") NDTensors.EmptyStorage{NDTensors.EmptyNumber, NDTensors.Dense{NDTensors.EmptyNumber, Vector{NDTensors.EmptyNumber}}} julia> neighbors(tn, (1, 1)) @@ -135,8 +135,8 @@ and 1 edge(s): with vertex data: 2-element Dictionaries.Dictionary{Tuple{Int64, Int64}, Any} - (1, 1) │ ((dim=2|id=712|"1×1,2×1"), (dim=2|id=598|"1×1,1×2")) - (1, 2) │ ((dim=2|id=598|"1×1,1×2"), (dim=2|id=683|"1×2,2×2")) + (1, 1) │ ((dim=2|id=74|"1×1,2×1"), (dim=2|id=723|"1×1,1×2")) + (1, 2) │ ((dim=2|id=723|"1×1,1×2"), (dim=2|id=712|"1×2,2×2")) julia> tn_2 = subgraph(v -> v[1] == 2, tn) ITensorNetworks.ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices: @@ -149,8 +149,8 @@ and 1 edge(s): with vertex data: 2-element Dictionaries.Dictionary{Tuple{Int64, Int64}, Any} - (2, 1) │ ((dim=2|id=712|"1×1,2×1"), (dim=2|id=457|"2×1,2×2")) - (2, 2) │ ((dim=2|id=457|"2×1,2×2"), (dim=2|id=683|"1×2,2×2")) + (2, 1) │ ((dim=2|id=74|"1×1,2×1"), (dim=2|id=823|"2×1,2×2")) + (2, 2) │ ((dim=2|id=823|"2×1,2×2"), (dim=2|id=712|"1×2,2×2")) ``` @@ -176,9 +176,9 @@ and 2 edge(s): with vertex data: 3-element Dictionaries.Dictionary{Int64, Vector{ITensors.Index}} - 1 │ ITensors.Index[(dim=2|id=830|"S=1/2,Site,n=1")] - 2 │ ITensors.Index[(dim=2|id=369|"S=1/2,Site,n=2")] - 3 │ ITensors.Index[(dim=2|id=558|"S=1/2,Site,n=3")] + 1 │ ITensors.Index[(dim=2|id=683|"S=1/2,Site,n=1")] + 2 │ ITensors.Index[(dim=2|id=123|"S=1/2,Site,n=2")] + 3 │ ITensors.Index[(dim=2|id=656|"S=1/2,Site,n=3")] and edge data: 0-element Dictionaries.Dictionary{NamedGraphs.NamedEdge{Int64}, Vector{ITensors.Index}} @@ -196,9 +196,9 @@ and 2 edge(s): with vertex data: 3-element Dictionaries.Dictionary{Int64, Any} - 1 │ ((dim=2|id=830|"S=1/2,Site,n=1"), (dim=2|id=186|"1,2")) - 2 │ ((dim=2|id=369|"S=1/2,Site,n=2"), (dim=2|id=186|"1,2"), (dim=2|id=430|"2,3… - 3 │ ((dim=2|id=558|"S=1/2,Site,n=3"), (dim=2|id=430|"2,3")) + 1 │ ((dim=2|id=683|"S=1/2,Site,n=1"), (dim=2|id=382|"1,2")) + 2 │ ((dim=2|id=123|"S=1/2,Site,n=2"), (dim=2|id=382|"1,2"), (dim=2|id=190|"2,3… + 3 │ ((dim=2|id=656|"S=1/2,Site,n=3"), (dim=2|id=190|"2,3")) julia> tn2 = ITensorNetwork(s; link_space=2) ITensorNetworks.ITensorNetwork{Int64} with 3 vertices: @@ -213,9 +213,9 @@ and 2 edge(s): with vertex data: 3-element Dictionaries.Dictionary{Int64, Any} - 1 │ ((dim=2|id=830|"S=1/2,Site,n=1"), (dim=2|id=994|"1,2")) - 2 │ ((dim=2|id=369|"S=1/2,Site,n=2"), (dim=2|id=994|"1,2"), (dim=2|id=978|"2,3… - 3 │ ((dim=2|id=558|"S=1/2,Site,n=3"), (dim=2|id=978|"2,3")) + 1 │ ((dim=2|id=683|"S=1/2,Site,n=1"), (dim=2|id=934|"1,2")) + 2 │ ((dim=2|id=123|"S=1/2,Site,n=2"), (dim=2|id=934|"1,2"), (dim=2|id=614|"2,3… + 3 │ ((dim=2|id=656|"S=1/2,Site,n=3"), (dim=2|id=614|"2,3")) julia> @visualize 
tn1; ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀ diff --git a/src/ModelNetworks/ModelNetworks.jl b/src/ModelNetworks/ModelNetworks.jl index d23d6e60..41fde3b1 100644 --- a/src/ModelNetworks/ModelNetworks.jl +++ b/src/ModelNetworks/ModelNetworks.jl @@ -1,6 +1,6 @@ module ModelNetworks using Graphs: degree, dst, edges, src -using ..ITensorNetworks: IndsNetwork, delta_network, insert_missing_internal_inds, itensor +using ..ITensorNetworks: IndsNetwork, delta_network, insert_linkinds, itensor using ITensors: commoninds, diagITensor, inds, noprime using LinearAlgebra: Diagonal, eigen using NamedGraphs: NamedGraph @@ -17,7 +17,7 @@ OPTIONAL ARGUMENT: function ising_network( eltype::Type, s::IndsNetwork, beta::Number; h::Number=0.0, szverts=nothing ) - s = insert_missing_internal_inds(s, edges(s); internal_inds_space=2) + s = insert_linkinds(s; link_space=2) tn = delta_network(eltype, s) if (szverts != nothing) for v in szverts diff --git a/src/abstractindsnetwork.jl b/src/abstractindsnetwork.jl index 6b06be6e..db40d481 100644 --- a/src/abstractindsnetwork.jl +++ b/src/abstractindsnetwork.jl @@ -23,6 +23,47 @@ end # TODO: Define a generic fallback for `AbstractDataGraph`? DataGraphs.edge_data_type(::Type{<:AbstractIndsNetwork{V,I}}) where {V,I} = Vector{I} +function indsnetwork_getindex(is::AbstractIndsNetwork, index) + return get(data_graph(is), index, indtype(is)[]) +end + +function Base.getindex(is::AbstractIndsNetwork, index) + return indsnetwork_getindex(is, index) +end + +function Base.getindex(is::AbstractIndsNetwork, index::Pair) + return indsnetwork_getindex(is, index) +end + +function Base.getindex(is::AbstractIndsNetwork, index::AbstractEdge) + return indsnetwork_getindex(is, index) +end + +function indsnetwork_setindex!(is::AbstractIndsNetwork, value, index) + data_graph(is)[index] = value + return is +end + +function Base.setindex!(is::AbstractIndsNetwork, value, index) + indsnetwork_setindex!(is, value, index) + return is +end + +function Base.setindex!(is::AbstractIndsNetwork, value, index::Pair) + indsnetwork_setindex!(is, value, index) + return is +end + +function Base.setindex!(is::AbstractIndsNetwork, value, index::AbstractEdge) + indsnetwork_setindex!(is, value, index) + return is +end + +function Base.setindex!(is::AbstractIndsNetwork, value::Index, index) + indsnetwork_setindex!(is, value, index) + return is +end + # # Index access # @@ -39,8 +80,8 @@ function ITensors.uniqueinds(is::AbstractIndsNetwork, edge::Pair) return uniqueinds(is, edgetype(is)(edge)) end -function Base.union(tn1::AbstractIndsNetwork, tn2::AbstractIndsNetwork; kwargs...) - return IndsNetwork(union(data_graph(tn1), data_graph(tn2); kwargs...)) +function Base.union(is1::AbstractIndsNetwork, is2::AbstractIndsNetwork; kwargs...) + return IndsNetwork(union(data_graph(is1), data_graph(is2); kwargs...)) end function NamedGraphs.rename_vertices(f::Function, tn::AbstractIndsNetwork) @@ -51,31 +92,47 @@ end # Convenience functions # +function promote_indtypeof(is::AbstractIndsNetwork) + sitetype = mapreduce(promote_indtype, vertices(is); init=Index{Int}) do v + return mapreduce(typeof, promote_indtype, is[v]; init=Index{Int}) + end + linktype = mapreduce(promote_indtype, edges(is); init=Index{Int}) do e + return mapreduce(typeof, promote_indtype, is[e]; init=Index{Int}) + end + return promote_indtype(sitetype, linktype) +end + function union_all_inds(is_in::AbstractIndsNetwork...) 
@assert all(map(ug -> ug == underlying_graph(is_in[1]), underlying_graph.(is_in))) is_out = IndsNetwork(underlying_graph(is_in[1])) for v in vertices(is_out) + # TODO: Remove this check. if any(isassigned(is, v) for is in is_in) + # TODO: Change `get` to `getindex`. is_out[v] = unioninds([get(is, v, Index[]) for is in is_in]...) end end for e in edges(is_out) + # TODO: Remove this check. if any(isassigned(is, e) for is in is_in) + # TODO: Change `get` to `getindex`. is_out[e] = unioninds([get(is, e, Index[]) for is in is_in]...) end end return is_out end -function insert_missing_internal_inds( +function insert_linkinds( indsnetwork::AbstractIndsNetwork, edges=edges(indsnetwork); - internal_inds_space=trivial_space(indsnetwork), + link_space=trivial_space(indsnetwork), ) indsnetwork = copy(indsnetwork) for e in edges + # TODO: Change to check if it is empty. if !isassigned(indsnetwork, e) - iₑ = Index(internal_inds_space, edge_tag(e)) + iₑ = Index(link_space, edge_tag(e)) + # TODO: Allow setting with just `Index`. indsnetwork[e] = [iₑ] end end diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index ed57c940..e700ed6d 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -121,7 +121,7 @@ function Base.union(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork; kw # Add any new edges that are introduced during the union for v1 in vertices(tn1) for v2 in vertices(tn2) - if hascommoninds(tn[v1], tn[v2]) + if hascommoninds(tn, v1 => v2) add_edge!(tn, v1 => v2) end end @@ -174,6 +174,8 @@ function Base.Vector{ITensor}(tn::AbstractITensorNetwork) end # Convenience wrapper +# TODO: Delete this and just use `Vector{ITensor}`, or maybe +# it should output a dictionary or be called `eachtensor`? itensors(tn::AbstractITensorNetwork) = Vector{ITensor}(tn) # @@ -184,10 +186,13 @@ function LinearAlgebra.promote_leaf_eltypes(tn::AbstractITensorNetwork) return LinearAlgebra.promote_leaf_eltypes(itensors(tn)) end -function trivial_space(tn::AbstractITensorNetwork) - return trivial_space(tn[first(vertices(tn))]) +function promote_indtypeof(tn::AbstractITensorNetwork) + return mapreduce(promote_indtype, vertices(tn)) do v + return indtype(tn[v]) + end end +# TODO: Delete in favor of `scalartype`. function ITensors.promote_itensor_eltype(tn::AbstractITensorNetwork) return LinearAlgebra.promote_leaf_eltypes(tn) end @@ -871,14 +876,13 @@ function site_combiners(tn::AbstractITensorNetwork{V}) where {V} return Cs end -# TODO: Combine with `insert_links`. -function insert_missing_internal_inds( - tn::AbstractITensorNetwork, edges; internal_inds_space=trivial_space(tn) +function insert_linkinds( + tn::AbstractITensorNetwork, edges=edges(tn); link_space=trivial_space(tn) ) tn = copy(tn) for e in edges - if !hascommoninds(tn[src(e)], tn[dst(e)]) - iₑ = Index(internal_inds_space, edge_tag(e)) + if !hascommoninds(tn, e) + iₑ = Index(link_space, edge_tag(e)) X = onehot(iₑ => 1) tn[src(e)] *= X tn[dst(e)] *= dag(X) @@ -887,13 +891,6 @@ function insert_missing_internal_inds( return tn end -# TODO: Combine with `insert_links`. -function insert_missing_internal_inds( - tn::AbstractITensorNetwork; internal_inds_space=trivial_space(tn) -) - return insert_internal_inds(tn, edges(tn); internal_inds_space) -end - # TODO: What to output? Could be an `IndsNetwork`. Or maybe # that would be a different function `commonindsnetwork`. 
# Even in that case, this could output a `Dictionary` @@ -923,8 +920,8 @@ function ITensorMPS.add(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork if !issetequal(edges_tn1, edges_tn2) new_edges = union(edges_tn1, edges_tn2) - tn1 = insert_missing_internal_inds(tn1, new_edges) - tn2 = insert_missing_internal_inds(tn2, new_edges) + tn1 = insert_linkinds(tn1, new_edges) + tn2 = insert_linkinds(tn2, new_edges) end edges_tn1, edges_tn2 = edges(tn1), edges(tn2) diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index af9ab60b..82bda3a3 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -126,28 +126,26 @@ end # Construction from IndsNetwork # -function ITensorNetwork( - eltype::Type, undef::UndefInitializer, inds_network::IndsNetwork; kwargs... -) - return ITensorNetwork(inds_network; kwargs...) do v +function ITensorNetwork(eltype::Type, undef::UndefInitializer, is::IndsNetwork; kwargs...) + return ITensorNetwork(is; kwargs...) do v return (inds...) -> ITensor(eltype, undef, inds...) end end -function ITensorNetwork(eltype::Type, inds_network::IndsNetwork; kwargs...) - return ITensorNetwork(inds_network; kwargs...) do v +function ITensorNetwork(eltype::Type, is::IndsNetwork; kwargs...) + return ITensorNetwork(is; kwargs...) do v return (inds...) -> ITensor(eltype, inds...) end end -function ITensorNetwork(undef::UndefInitializer, inds_network::IndsNetwork; kwargs...) - return ITensorNetwork(inds_network; kwargs...) do v +function ITensorNetwork(undef::UndefInitializer, is::IndsNetwork; kwargs...) + return ITensorNetwork(is; kwargs...) do v return (inds...) -> ITensor(undef, inds...) end end -function ITensorNetwork(inds_network::IndsNetwork; kwargs...) - return ITensorNetwork(inds_network; kwargs...) do v +function ITensorNetwork(is::IndsNetwork; kwargs...) + return ITensorNetwork(is; kwargs...) do v return (inds...) -> ITensor(inds...) end end @@ -191,12 +189,14 @@ function to_callable(value::AbstractArray{<:Any,N}) where {N} end to_callable(value) = Returns(value) -function ITensorNetwork(value, inds_network::IndsNetwork; kwargs...) - return ITensorNetwork(to_callable(value), inds_network; kwargs...) +function ITensorNetwork(value, is::IndsNetwork; kwargs...) + return ITensorNetwork(to_callable(value), is; kwargs...) end -function ITensorNetwork(elt::Type, f, inds_network::IndsNetwork; link_space=1, kwargs...) - tn = ITensorNetwork(f, inds_network; kwargs...) +function ITensorNetwork( + elt::Type, f, is::IndsNetwork; link_space=trivial_space(is), kwargs... +) + tn = ITensorNetwork(f, is; kwargs...) for v in vertices(tn) # TODO: Ideally we would use broadcasting, i.e. `elt.(tn[v])`, # but that doesn't work right now on ITensors. @@ -206,48 +206,26 @@ function ITensorNetwork(elt::Type, f, inds_network::IndsNetwork; link_space=1, k end function ITensorNetwork( - itensor_constructor::Function, inds_network::IndsNetwork; link_space=1, kwargs... + itensor_constructor::Function, is::IndsNetwork; link_space=trivial_space(is), kwargs... ) - if isnothing(link_space) - # Make sure the link space is set - link_space = 1 - end - # Graphs.jl uses `zero` to create a graph of the same type - # without any vertices or edges. - inds_network_merge = typeof(inds_network)( - underlying_graph(inds_network); link_space, kwargs... 
- ) - inds_network = union(inds_network_merge, inds_network) - tn = ITensorNetwork{vertextype(inds_network)}() - for v in vertices(inds_network) + is = insert_linkinds(is; link_space) + tn = ITensorNetwork{vertextype(is)}() + for v in vertices(is) add_vertex!(tn, v) end - for e in edges(inds_network) + for e in edges(is) add_edge!(tn, e) end for v in vertices(tn) - siteinds = get(inds_network, v, indtype(inds_network)[]) - edges = [edgetype(inds_network)(v, nv) for nv in neighbors(inds_network, v)] - linkinds = map(e -> get(inds_network, e, indtype(inds_network)[]), Indices(edges)) + siteinds = is[v] + edges = [edgetype(is)(v, nv) for nv in neighbors(is, v)] + linkinds = map(e -> is[e], Indices(edges)) tensor_v = generic_state(itensor_constructor(v), (; siteinds, linkinds)) setindex_preserve_graph!(tn, tensor_v, v) end return tn end -# TODO: Remove this in favor of `insert_missing_internal_inds` -# or call it a different name, such as `factorize_edges`. -function insert_links(ψ::ITensorNetwork, edges::Vector=edges(ψ); cutoff=1e-15) - for e in edges - # Define this to work? - # ψ = factorize(ψ, e; cutoff) - ψᵥ₁, ψᵥ₂ = factorize(ψ[src(e)] * ψ[dst(e)], inds(ψ[src(e)]); cutoff, tags=edge_tag(e)) - ψ[src(e)] = ψᵥ₁ - ψ[dst(e)] = ψᵥ₂ - end - return ψ -end - ITensorNetwork(itns::Vector{ITensorNetwork}) = reduce(⊗, itns) function Base.Vector{ITensor}(ψ::ITensorNetwork) diff --git a/src/itensors.jl b/src/itensors.jl index a479fbaa..b47e4b0f 100644 --- a/src/itensors.jl +++ b/src/itensors.jl @@ -26,24 +26,50 @@ end # TODO: Move patch to `ITensors.jl`. ITensors._contract(As, index::Key) = As[index] -spacetype(::Type{Index}) = Int +indtype(a::ITensor) = promote_indtype(typeof.(inds(a))...) + +spacetype(::Index{T}) where {T} = T spacetype(::Type{<:Index{T}}) where {T} = T -spacetype(T::Type{<:Vector}) = spacetype(eltype(T)) -trivial_space(::Type{<:Integer}) = 1 -trivial_space(::Type{<:Pair{QN}}) = (QN() => 1) -trivial_space(T::Type{<:Vector{<:Pair{QN}}}) = [trivial_space(eltype(T))] +function promote_indtype(is::Vararg{Type{<:Index}}) + return reduce(promote_indtype_rule, is; init=Index{Int}) +end + +function promote_spacetype_rule(type1::Type, type2::Type) + return error("Not implemented") +end -_trivial_space(T::Type) = trivial_space(spacetype(T)) -_trivial_space(x::Any) = trivial_space(typeof(x)) +function promote_spacetype_rule( + type1::Type{<:Integer}, type2::Type{<:Vector{<:Pair{QN,T2}}} +) where {T2<:Integer} + return Vector{Pair{QN,promote_type(type1, T2)}} +end + +function promote_spacetype_rule( + type1::Type{<:Vector{<:Pair{QN,<:Integer}}}, type2::Type{<:Integer} +) + return promote_spacetype_rule(type2, type1) +end + +function promote_spacetype_rule( + type1::Type{<:Vector{<:Pair{QN,T1}}}, type2::Type{<:Vector{<:Pair{QN,T2}}} +) where {T1<:Integer,T2<:Integer} + return Vector{Pair{QN,promote_type(T1, T2)}} +end + +function promote_spacetype_rule(type1::Type{<:Integer}, type2::Type{<:Integer}) + return promote_type(type1, type2) +end + +function promote_indtype_rule(type1::Type{<:Index}, type2::Type{<:Index}) + return Index{promote_spacetype_rule(spacetype(type1), spacetype(type2))} +end -trivial_space(T::Type{<:Index}) = _trivial_space(T) -trivial_space(T::Type{<:Vector}) = _trivial_space(T) +trivial_space(x) = trivial_space(promote_indtypeof(x)) +trivial_space(x::Type) = trivial_space(promote_indtype(x)) -trivial_space(x::Index) = _trivial_space(x) -trivial_space(x::Vector{<:Index}) = _trivial_space(x) -trivial_space(x::ITensor) = trivial_space(inds(x)) 
-trivial_space(x::Tuple{Vararg{Index}}) = trivial_space(first(x)) +trivial_space(i::Type{<:Index{<:Integer}}) = 1 +trivial_space(i::Type{<:Index{<:Vector{<:Pair{<:QN,<:Integer}}}}) = [QN() => 1] """ Given an input tensor and a Dict (ind_to_newind), replace inds of tensor that are also diff --git a/src/specialitensornetworks.jl b/src/specialitensornetworks.jl index 015cab39..d36d4778 100644 --- a/src/specialitensornetworks.jl +++ b/src/specialitensornetworks.jl @@ -7,45 +7,43 @@ using Distributions: Distribution RETURN A TENSOR NETWORK WITH COPY TENSORS ON EACH VERTEX. Note that passing a link_space will mean the indices of the resulting network don't match those of the input indsnetwork """ -function delta_network(eltype::Type, s::IndsNetwork; link_space=nothing) - return ITensorNetwork(s; link_space) do v +function delta_network(eltype::Type, s::IndsNetwork; kwargs...) + return ITensorNetwork(s; kwargs...) do v return inds -> delta(eltype, inds) end end -function delta_network(s::IndsNetwork; link_space=nothing) - return delta_network(Float64, s; link_space) +function delta_network(s::IndsNetwork; kwargs...) + return delta_network(Float64, s; kwargs...) end -function delta_network(eltype::Type, graph::AbstractNamedGraph; link_space=nothing) - return delta_network(eltype, IndsNetwork(graph; link_space)) +function delta_network(eltype::Type, graph::AbstractNamedGraph; kwargs...) + return delta_network(eltype, IndsNetwork(graph; kwargs...)) end -function delta_network(graph::AbstractNamedGraph; link_space=nothing) - return delta_network(Float64, graph; link_space) +function delta_network(graph::AbstractNamedGraph; kwargs...) + return delta_network(Float64, graph; kwargs...) end """ Build an ITensor network on a graph specified by the inds network s. Bond_dim is given by link_space and entries are randomised (normal distribution, mean 0 std 1) """ -function random_tensornetwork(eltype::Type, s::IndsNetwork; link_space=nothing) - return ITensorNetwork(s; link_space) do v +function random_tensornetwork(eltype::Type, s::IndsNetwork; kwargs...) + return ITensorNetwork(s; kwargs...) do v return inds -> itensor(randn(eltype, dim.(inds)...), inds) end end -function random_tensornetwork(s::IndsNetwork; link_space=nothing) - return random_tensornetwork(Float64, s; link_space) +function random_tensornetwork(s::IndsNetwork; kwargs...) + return random_tensornetwork(Float64, s; kwargs...) end -@traitfn function random_tensornetwork( - eltype::Type, g::::IsUnderlyingGraph; link_space=nothing -) - return random_tensornetwork(eltype, IndsNetwork(g); link_space) +@traitfn function random_tensornetwork(eltype::Type, g::::IsUnderlyingGraph; kwargs...) + return random_tensornetwork(eltype, IndsNetwork(g); kwargs...) end -@traitfn function random_tensornetwork(g::::IsUnderlyingGraph; link_space=nothing) - return random_tensornetwork(Float64, IndsNetwork(g); link_space) +@traitfn function random_tensornetwork(g::::IsUnderlyingGraph; kwargs...) + return random_tensornetwork(Float64, IndsNetwork(g); kwargs...) end """ @@ -53,16 +51,14 @@ Build an ITensor network on a graph specified by the inds network s. Bond_dim is given by link_space and entries are randomized. The random distribution is based on the input argument `distribution`. """ -function random_tensornetwork( - distribution::Distribution, s::IndsNetwork; link_space=nothing -) - return ITensorNetwork(s; link_space) do v +function random_tensornetwork(distribution::Distribution, s::IndsNetwork; kwargs...) + return ITensorNetwork(s; kwargs...) 
do v return inds -> itensor(rand(distribution, dim.(inds)...), inds) end end @traitfn function random_tensornetwork( - distribution::Distribution, g::::IsUnderlyingGraph; link_space=nothing + distribution::Distribution, g::::IsUnderlyingGraph; kwargs... ) - return random_tensornetwork(distribution, IndsNetwork(g); link_space) + return random_tensornetwork(distribution, IndsNetwork(g); kwargs...) end diff --git a/src/tebd.jl b/src/tebd.jl index 00fe5f35..edf5a188 100644 --- a/src/tebd.jl +++ b/src/tebd.jl @@ -19,7 +19,7 @@ function tebd( if step % print_frequency == 0 @show step, (step - 1) * Δβ, β end - ψ = insert_links(ψ) + ψ = insert_linkinds(ψ) ψ = apply(u⃗, ψ; cutoff, maxdim, normalize=true, ortho, kwargs...) if ortho for v in vertices(ψ) diff --git a/src/treetensornetworks/ttn.jl b/src/treetensornetworks/ttn.jl index f547f470..50f4c387 100644 --- a/src/treetensornetworks/ttn.jl +++ b/src/treetensornetworks/ttn.jl @@ -1,3 +1,4 @@ +using Graphs: path_graph using ITensors: ITensor using NamedGraphs: vertextype @@ -85,3 +86,14 @@ function random_mps(args...; kwargs...) # TODO: Check it is a path graph. return random_tensornetwork(args...; kwargs...) end + +function random_mps(s::Vector{<:Index}; kwargs...) + g = path_graph(length(s)) + # TODO: Specify data type is `eltype(s)`. + is = IndsNetwork(g) + for v in vertices(is) + # TODO: Allow setting with just `s[v]`. + is[v] = [s[v]] + end + return random_tensornetwork(is; kwargs...) +end diff --git a/test/test_itensornetwork.jl b/test/test_itensornetwork.jl index c44f80da..d6547721 100644 --- a/test/test_itensornetwork.jl +++ b/test/test_itensornetwork.jl @@ -151,7 +151,7 @@ using Test: @test, @test_broken, @testset end @testset "Custom element type" for elt in (Float32, Float64, ComplexF32, ComplexF64), - link_space in (nothing, 3), + kwargs in ((;), (; link_space=3)), g in ( grid((4,)), named_grid((3, 3)), @@ -159,19 +159,19 @@ using Test: @test, @test_broken, @testset siteinds("S=1/2", named_grid((3, 3))), ) - ψ = ITensorNetwork(g; link_space) do v + ψ = ITensorNetwork(g; kwargs...) do v return inds -> itensor(randn(elt, dim.(inds)...), inds) end @test eltype(ψ[first(vertices(ψ))]) == elt - ψ = ITensorNetwork(g; link_space) do v + ψ = ITensorNetwork(g; kwargs...) do v return inds -> itensor(randn(dim.(inds)...), inds) end @test eltype(ψ[first(vertices(ψ))]) == Float64 - ψ = random_tensornetwork(elt, g; link_space) + ψ = random_tensornetwork(elt, g; kwargs...) @test eltype(ψ[first(vertices(ψ))]) == elt - ψ = random_tensornetwork(g; link_space) + ψ = random_tensornetwork(g; kwargs...) @test eltype(ψ[first(vertices(ψ))]) == Float64 - ψ = ITensorNetwork(elt, undef, g; link_space) + ψ = ITensorNetwork(elt, undef, g; kwargs...) @test eltype(ψ[first(vertices(ψ))]) == elt ψ = ITensorNetwork(undef, g) @test eltype(ψ[first(vertices(ψ))]) == Float64
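
For reference, a minimal usage sketch of the API touched by this diff (the `insert_missing_internal_inds` -> `insert_linkinds` rename, the new `IndsNetwork` `getindex` overloads, and the new `random_mps` method from `ttn.jl`). This snippet is not part of the patch; `named_grid` and `siteinds` are assumed to be available as in the tests above, and exact import paths may differ by package version.

```julia
using Graphs: edges
using ITensors: siteinds
using NamedGraphs: named_grid
using ITensorNetworks: insert_linkinds, random_mps, random_tensornetwork

g = named_grid((2, 2))
s = siteinds("S=1/2", g)        # IndsNetwork carrying only site indices

# `insert_linkinds` (previously `insert_missing_internal_inds`) adds a link index
# of dimension `link_space` on every edge that does not already have one.
s = insert_linkinds(s; link_space=2)

# The new `getindex` overloads give direct access to vertex and edge index data.
@show s[(1, 1)]                 # site indices at vertex (1, 1)
@show s[first(edges(s))]        # link indices on the first edge

# Constructors forward `link_space` (defaulting to `trivial_space`), as exercised
# in the updated tests.
tn = random_tensornetwork(Float64, g; link_space=3)

# New convenience method added in ttn.jl: an MPS-like network built from a plain
# vector of site indices (a path graph is created internally).
mps = random_mps(siteinds("S=1/2", 4))
```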