Absorb ITensors.ITensorMPS
mtfishman committed Oct 22, 2024
1 parent df0bab2 commit 51fc2e5
Showing 110 changed files with 16,715 additions and 199 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -25,3 +25,5 @@ Manifest.toml

# Vi/Vim backup files
.*.swp

.DS_Store
20 changes: 20 additions & 0 deletions Project.toml
@@ -4,14 +4,34 @@ authors = ["Matthew Fishman <[email protected]>", "Miles Stoudenmir
version = "0.2.6"

[deps]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
ITensorTDVP = "25707e16-a4db-4a07-99d9-4d67b7af0342"
ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5"
IsApprox = "28f27b66-4bd8-47e7-9110-e2746eb8bed7"
KrylovKit = "0b1a1467-8014-51b9-945f-bf0ae24f4b77"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
SerializedElementArrays = "d3ce8812-9567-47e9-a7b5-65a6d70a3065"
TupleTools = "9d95972d-f1c8-5527-a6e0-b4b365fa01f6"

[compat]
Adapt = "4.1.0"
Compat = "4.16.0"
ITensorTDVP = "0.4.1"
ITensors = "0.6.7"
IsApprox = "2.0.0"
KrylovKit = "0.8.1"
LinearAlgebra = "1.11.0"
NDTensors = "0.3.46"
Printf = "1.11.0"
Random = "1.11.0"
Reexport = "1"
SerializedElementArrays = "0.1.0"
TupleTools = "1.5.0"
julia = "1.10"

[extras]
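
The expanded [deps] and [compat] sections reflect that ITensorMPS now carries the MPS/MPO functionality directly rather than re-exporting it from ITensors.ITensorMPS. A minimal usage sketch, assuming a registered ITensorMPS in a Julia ≥ 1.10 environment (not part of this commit):

# Sketch, not part of the commit:
using Pkg
Pkg.add("ITensorMPS")  # pulls in ITensors, NDTensors, KrylovKit, etc. per [deps]

using ITensors, ITensorMPS  # the example scripts below load both packages
s = siteinds("S=1/2", 10)
ψ = random_mps(s; linkdims=4)
@show norm(ψ)  # expect ≈ 1 for a random MPS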
52 changes: 52 additions & 0 deletions examples/autodiff/circuit_optimization/op.jl
@@ -0,0 +1,52 @@
using ITensors, ITensorMPS
using Zygote

s = siteind("Qubit")

f(x) = op("Ry", s; θ=x)[1, 1]

x = 0.2
@show f(x), cos(x / 2)
@show f'(x), -sin(x / 2) / 2

# Simple gate optimization
ψ0 = state(s, "0")
ψp = state(s, "+")

function loss(x)
  U = op("Ry", s; θ=x)
  Uψ0 = replaceprime(U * ψ0, 1 => 0)
  return -(dag(ψp) * Uψ0)[]
end

# Extremely simple gradient descent implementation,
# where gradients are computed with automatic differentiation
# using Zygote.
function gradient_descent(f, x0; γ, nsteps, grad_tol)
  @show γ, nsteps
  x = x0
  f_x = f(x)
  ∇f_x = f'(x)
  step = 0
  @show step, x, f_x, ∇f_x
  for step in 1:nsteps
    x -= γ * ∇f_x
    f_x = f(x)
    ∇f_x = f'(x)
    @show step, x, f_x, ∇f_x
    if norm(∇f_x) ≤ grad_tol
      break
    end
  end
  return x, f_x, ∇f_x
end

x0 = 0
γ = 2.0 # Learning rate
nsteps = 30 # Number of steps of gradient descent
grad_tol = 1e-4 # Stop if gradient falls below this value
x, loss_x, ∇loss_x = gradient_descent(loss, x0; γ=γ, nsteps=nsteps, grad_tol=grad_tol)

@show x0, loss(x0)
@show x, loss(x)
@show π / 2, loss(π / 2)
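
The descent above should converge to θ = π/2, where Ry(π/2)|0⟩ = |+⟩ and the loss reaches its minimum of -1. A quick standalone check of that optimum, sketched with the same API as the example (not part of the commit):

# Sketch: verify the known optimum θ = π/2 directly.
using ITensors, ITensorMPS
s = siteind("Qubit")
ψ0 = state(s, "0")
ψp = state(s, "+")
U = op("Ry", s; θ=π / 2)
Uψ0 = replaceprime(U * ψ0, 1 => 0)
@show (dag(ψp) * Uψ0)[]  # expect ≈ 1, i.e. loss(π / 2) ≈ -1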
65 changes: 65 additions & 0 deletions examples/autodiff/circuit_optimization/state_preparation.jl
@@ -0,0 +1,65 @@
using ITensors, ITensorMPS
using OptimKit
using Random
using Zygote

nsites = 20 # Number of sites
nlayers = 3 # Layers of gates in the ansatz
gradtol = 1e-4 # Tolerance for stopping gradient descent

# A layer of the circuit we want to optimize
function layer(nsites, θ⃗)
  RY_layer = [("Ry", (n,), (θ=θ⃗[n],)) for n in 1:nsites]
  CX_layer = [("CX", (n, n + 1)) for n in 1:2:(nsites - 1)]
  return [RY_layer; CX_layer]
end

# The variational circuit we want to optimize
function variational_circuit(nsites, nlayers, θ⃗)
  range = 1:nsites
  circuit = layer(nsites, θ⃗[range])
  for n in 1:(nlayers - 1)
    circuit = [circuit; layer(nsites, θ⃗[range .+ n * nsites])]
  end
  return circuit
end

Random.seed!(1234)

θ⃗ᵗᵃʳᵍᵉᵗ = 2π * rand(nsites * nlayers)
𝒰ᵗᵃʳᵍᵉᵗ = variational_circuit(nsites, nlayers, θ⃗ᵗᵃʳᵍᵉᵗ)

s = siteinds("Qubit", nsites)
Uᵗᵃʳᵍᵉᵗ = ops(𝒰ᵗᵃʳᵍᵉᵗ, s)

ψ0 = MPS(s, "0")

# Create the random target state
ψᵗᵃʳᵍᵉᵗ = apply(Uᵗᵃʳᵍᵉᵗ, ψ0; cutoff=1e-8)

#
# The loss function, a function of the gate parameters
# and implicitly depending on the target state:
#
# loss(θ⃗) = -|⟨θ⃗ᵗᵃʳᵍᵉᵗ|U(θ⃗)|0⟩|² = -|⟨θ⃗ᵗᵃʳᵍᵉᵗ|θ⃗⟩|²
#
function loss(θ⃗)
  nsites = length(ψ0)
  s = siteinds(ψ0)
  𝒰θ⃗ = variational_circuit(nsites, nlayers, θ⃗)
  Uθ⃗ = ops(𝒰θ⃗, s)
  ψθ⃗ = apply(Uθ⃗, ψ0)
  return -abs(inner(ψᵗᵃʳᵍᵉᵗ, ψθ⃗))^2
end

θ⃗₀ = randn!(copy(θ⃗ᵗᵃʳᵍᵉᵗ))

@show loss(θ⃗₀), loss(θ⃗ᵗᵃʳᵍᵉᵗ)

loss_∇loss(x) = (loss(x), convert(Vector, loss'(x)))
algorithm = LBFGS(; gradtol=gradtol, verbosity=2)
θ⃗ₒₚₜ, lossₒₚₜ, ∇lossₒₚₜ, numfg, normgradhistory = optimize(loss_∇loss, θ⃗₀, algorithm)

@show loss(θ⃗ₒₚₜ), loss(θ⃗ᵗᵃʳᵍᵉᵗ)

nothing
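
A possible follow-up, sketched under the assumption that the script above has run (so θ⃗ₒₚₜ, s, ψ0, and ψᵗᵃʳᵍᵉᵗ are in scope): rebuild the circuit at the optimized angles and report the fidelity with the target state, which should approach 1 as the loss approaches -1.

# Sketch, not part of the commit:
𝒰ₒₚₜ = variational_circuit(nsites, nlayers, θ⃗ₒₚₜ)
ψₒₚₜ = apply(ops(𝒰ₒₚₜ, s), ψ0; cutoff=1e-8)
@show abs(inner(ψᵗᵃʳᵍᵉᵗ, ψₒₚₜ))^2  # fidelity with the target state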
81 changes: 81 additions & 0 deletions examples/autodiff/circuit_optimization/vqe.jl
@@ -0,0 +1,81 @@
using ITensors, ITensorMPS
using OptimKit
using Random
using Zygote

nsites = 4 # Number of sites
nlayers = 2 # Layers of gates in the ansatz
gradtol = 1e-4 # Tolerance for stopping gradient descent

# The Hamiltonian we are minimizing
function ising_hamiltonian(nsites; h)
  ℋ = OpSum()
  for j in 1:(nsites - 1)
    ℋ -= 1, "Z", j, "Z", j + 1
  end
  for j in 1:nsites
    ℋ += h, "X", j
  end
  return ℋ
end

# A layer of the circuit we want to optimize
function layer(nsites, θ⃗)
  RY_layer = [("Ry", (n,), (θ=θ⃗[n],)) for n in 1:nsites]
  CX_layer = [("CX", (n, n + 1)) for n in 1:2:(nsites - 1)]
  return [RY_layer; CX_layer]
end

# The variational circuit we want to optimize
function variational_circuit(nsites, nlayers, θ⃗)
  range = 1:nsites
  circuit = layer(nsites, θ⃗[range])
  for n in 1:(nlayers - 1)
    circuit = [circuit; layer(nsites, θ⃗[range .+ n * nsites])]
  end
  return circuit
end

s = siteinds("Qubit", nsites)

h = 1.3
ℋ = ising_hamiltonian(nsites; h=h)
H = MPO(ℋ, s)
ψ0 = MPS(s, "0")

#
# The loss function, a function of the gate parameters
# and implicitly depending on the Hamiltonian and state:
#
# loss(θ⃗) = ⟨0|U(θ⃗)† H U(θ⃗)|0⟩ = ⟨θ⃗|H|θ⃗⟩
#
function loss(θ⃗)
  nsites = length(ψ0)
  s = siteinds(ψ0)
  𝒰θ⃗ = variational_circuit(nsites, nlayers, θ⃗)
  Uθ⃗ = ops(𝒰θ⃗, s)
  ψθ⃗ = apply(Uθ⃗, ψ0; cutoff=1e-8)
  return inner(ψθ⃗, H, ψθ⃗; cutoff=1e-8)
end

Random.seed!(1234)
θ⃗₀ = 2π * rand(nsites * nlayers)

@show loss(θ⃗₀)

println("\nOptimize circuit with gradient optimization")

loss_∇loss(x) = (loss(x), convert(Vector, loss'(x)))
algorithm = LBFGS(; gradtol=1e-3, verbosity=2)
θ⃗ₒₚₜ, lossₒₚₜ, ∇lossₒₚₜ, numfg, normgradhistory = optimize(loss_∇loss, θ⃗₀, algorithm)

@show loss(θ⃗ₒₚₜ)

println("\nRun DMRG as a comparison")

e_dmrg, ψ_dmrg = dmrg(H, ψ0; nsweeps=5, maxdim=10)

println("\nCompare variational circuit energy to DMRG energy")
@show loss(θ⃗ₒₚₜ), e_dmrg

nothing
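
Beyond comparing energies, the overlap between the optimized circuit state and the DMRG state gives a direct measure of how well the shallow ansatz captures the ground state. A hedged sketch, assuming the script above has run so θ⃗ₒₚₜ, s, ψ0, and ψ_dmrg are in scope:

# Sketch, not part of the commit:
𝒰ₒₚₜ = variational_circuit(nsites, nlayers, θ⃗ₒₚₜ)
ψₒₚₜ = apply(ops(𝒰ₒₚₜ, s), ψ0; cutoff=1e-8)
@show abs(inner(ψ_dmrg, ψₒₚₜ))  # expect close to 1 if the ansatz is expressive enough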
49 changes: 49 additions & 0 deletions examples/autodiff/mps_autodiff.jl
@@ -0,0 +1,49 @@
using ITensors, ITensorMPS
using OptimKit
using Zygote

function ising(n; J, h)
  os = OpSum()
  for j in 1:(n - 1)
    os -= J, "Z", j, "Z", j + 1
  end
  for j in 1:n
    os -= h, "X", j
  end
  return os
end

function loss(H, ψ)
  n = length(ψ)
  ψHψ = ITensor(1.0)
  ψψ = ITensor(1.0)
  for j in 1:n
    ψHψ = ψHψ * dag(ψ[j]') * H[j] * ψ[j]
    ψψ = ψψ * replaceinds(dag(ψ[j]'), s[j]' => s[j]) * ψ[j]
  end
  return ψHψ[] / ψψ[]
end

n = 10
s = siteinds("S=1/2", n)
J = 1.0
h = 0.5

# Loss function only works with `Vector{ITensor}`,
# extract with `ITensors.data`.
ψ0 = ITensors.data(random_mps(s; linkdims=10))
H = ITensors.data(MPO(ising(n; J, h), s))

loss(ψ) = loss(H, ψ)

optimizer = LBFGS(; maxiter=25, verbosity=2)
function loss_and_grad(x)
  y, (∇,) = withgradient(loss, x)
  return y, ∇
end
ψ, fs, gs, niter, normgradhistory = optimize(loss_and_grad, ψ0, optimizer)
Edmrg, ψdmrg = dmrg(MPO(H), MPS(ψ0); nsweeps=10, cutoff=1e-8)

@show loss(ψ0), norm(loss'(ψ0))
@show loss(ψ), norm(loss'(ψ))
@show loss(ITensors.data(ψdmrg)), norm(loss'(ITensors.data(ψdmrg)))
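
As a final sanity check, one could wrap the optimized tensors back into an MPS and compare with the DMRG result; a small sketch, assuming the script above has run:

# Sketch, not part of the commit:
ψopt = MPS(ψ)  # `optimize` returns a Vector{ITensor}; rewrap it as an MPS
@show abs(inner(normalize(ψopt), normalize(ψdmrg)))  # overlap with the DMRG state
@show loss(ψ) - Edmrg  # energy gap to the DMRG result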