Skip to content

Commit

Permalink
Merge branch 'master' of github.com:ZIB-IOL/FrankWolfe.jl into direct…
Browse files Browse the repository at this point in the history
…_solve_projection
  • Loading branch information
matbesancon committed Dec 30, 2024
2 parents 67544db + 54b30de commit 4182b94
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 29 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "FrankWolfe"
uuid = "f55ce6ea-fdc5-4628-88c5-0087fe54bd30"
authors = ["ZIB-IOL"]
version = "0.4.4"
version = "0.4.6"

[deps]
Arpack = "7d9fca2a-8960-54d3-9f78-7d1dccf2cb97"
Expand Down
48 changes: 20 additions & 28 deletions src/dicg.jl
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ function decomposition_invariant_conditional_gradient(
end

x = x0

if memory_mode isa InplaceEmphasis && !isa(x, Union{Array,SparseArrays.AbstractSparseArray})
# if integer, convert element type to most appropriate float
if eltype(x) <: Integer
Expand Down Expand Up @@ -117,7 +117,7 @@ function decomposition_invariant_conditional_gradient(
println("GRADIENTTYPE: $grad_type LAZY: $lazy lazy_tolerance: $lazy_tolerance")
println("LMO: $(typeof(lmo))")
if memory_mode isa InplaceEmphasis
@info("In memory_mode memory iterates are wristep_typeen back into x0!")
@info("In memory_mode memory iterates are written back into x0!")
end
end

Expand All @@ -127,12 +127,12 @@ function decomposition_invariant_conditional_gradient(
gamma = one(phi)

if lazy
if extra_vertex_storage === nothing
v = compute_extreme_point(lmo, gradient, lazy = lazy)
pre_computed_set = [v]
else
pre_computed_set = extra_vertex_storage
end
if extra_vertex_storage === nothing
v = compute_extreme_point(lmo, gradient, lazy=lazy)
pre_computed_set = [v]
else
pre_computed_set = extra_vertex_storage
end
end

if linesearch_workspace === nothing
Expand Down Expand Up @@ -168,16 +168,8 @@ function decomposition_invariant_conditional_gradient(
end

if lazy
d, v, v_index, a, away_index, phi, step_type =
lazy_dicg_step(
x,
gradient,
lmo,
pre_computed_set,
phi,
epsilon,
d;
)
d, v, v_index, a, away_index, phi, step_type =
lazy_dicg_step(x, gradient, lmo, pre_computed_set, phi, epsilon, d;)
else # non-lazy, call the simple and modified
v = compute_extreme_point(lmo, gradient, lazy=lazy)
dual_gap = fast_dot(gradient, x) - fast_dot(gradient, v)
Expand Down Expand Up @@ -205,7 +197,7 @@ function decomposition_invariant_conditional_gradient(
push!(pre_computed_set, v)
end
end

if callback !== nothing
state = CallbackState(
t,
Expand All @@ -223,7 +215,7 @@ function decomposition_invariant_conditional_gradient(
gradient,
step_type,
)
if callback(state) === false
if callback(state, a, v) === false
break
end
end
Expand Down Expand Up @@ -259,7 +251,7 @@ function decomposition_invariant_conditional_gradient(
gradient,
step_type,
)
callback(state)
callback(state, nothing, v)
end
end
return (x=x, v=v, primal=primal, dual_gap=dual_gap, traj_data=traj_data)
Expand Down Expand Up @@ -352,7 +344,7 @@ function blended_decomposition_invariant_conditional_gradient(
println("GRADIENTTYPE: $grad_type LAZY: $lazy lazy_tolerance: $lazy_tolerance")
println("LMO: $(typeof(lmo))")
if memory_mode isa InplaceEmphasis
@info("In memory_mode memory iterates are wristep_typeen back into x0!")
@info("In memory_mode memory iterates are written back into x0!")
end
end

Expand Down Expand Up @@ -441,7 +433,7 @@ function blended_decomposition_invariant_conditional_gradient(
gradient,
step_type,
)
if callback(state) === false
if callback(state, a, v) === false
break
end
end
Expand Down Expand Up @@ -477,7 +469,7 @@ function blended_decomposition_invariant_conditional_gradient(
gradient,
step_type,
)
callback(state)
callback(state, nothing, v)
end
end
return (x=x, v=v, primal=primal, dual_gap=dual_gap, traj_data=traj_data)
Expand Down Expand Up @@ -518,19 +510,19 @@ function lazy_dicg_step(
v = compute_extreme_point(lmo, gradient)
grad_dot_v = fast_dot(gradient, v)
# Do lazy inface_point
if grad_dot_a_local - grad_dot_v >= phi / lazy_tolerance &&
grad_dot_a_local - grad_dot_v >= epsilon
if grad_dot_a_local - grad_dot_v >= phi / lazy_tolerance &&
grad_dot_a_local - grad_dot_v >= epsilon
step_type = ST_LAZY
a = a_local
away_index = a_local_loc
else
a = compute_inface_extreme_point(lmo, NegatingArray(gradient), x)
end

# Real dual gap promises enough progress.
grad_dot_fw_vertex = fast_dot(v, gradient)
dual_gap = grad_dot_x - grad_dot_fw_vertex

if dual_gap >= phi / lazy_tolerance
d = muladd_memory_mode(memory_mode, d, a, v)
#Lower our expectation for progress.
Expand Down

0 comments on commit 4182b94

Please sign in to comment.